Merge branch 'dev' into 5518-log-user-role
gitea/salix/pipeline/head: There was a failure building this commit.

Alexandre Riera 2023-04-17 09:53:36 +00:00
commit 7d8a0597c1
109 changed files with 903 additions and 1306 deletions

View File

@ -5,6 +5,17 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [2316.01] - 2023-05-04
### Added
-
### Changed
-
### Fixed
-
## [2314.01] - 2023-04-20
### Added
@ -12,9 +23,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- (Monitor tickets) Muestra un icono al lado de la zona, si el ticket es frágil y se envía por agencia
- (Facturas recibidas -> Bases negativas) Nueva sección
### Changed
-
### Fixed
- (Clientes -> Morosos) Ahora se mantienen los elementos seleccionados al hacer scroll.

View File

@ -1,7 +1,6 @@
const UserError = require('vn-loopback/util/user-error');
const fs = require('fs-extra');
const fs = require('fs/promises');
const path = require('path');
const uuid = require('uuid');
module.exports = Self => {
Self.remoteMethodCtx('upload', {
@ -36,7 +35,7 @@ module.exports = Self => {
const fileOptions = {};
const args = ctx.args;
let srcFile;
let tempFilePath;
try {
const hasWriteRole = await models.ImageCollection.hasWriteRole(ctx, args.collection);
if (!hasWriteRole)
@ -53,15 +52,20 @@ module.exports = Self => {
});
const file = await TempContainer.getFile(tempContainer.name, uploadedFile.name);
srcFile = path.join(file.client.root, file.container, file.name);
tempFilePath = path.join(file.client.root, file.container, file.name);
const fileName = `${uuid.v4()}.png`;
await models.Image.registerImage(args.collection, srcFile, fileName, args.id);
} catch (e) {
if (fs.existsSync(srcFile))
await fs.unlink(srcFile);
const fileName = `${args.id}.png`;
throw e;
await models.Image.resize({
collectionName: args.collection,
srcFile: tempFilePath,
fileName: fileName,
entityId: args.id
});
} finally {
try {
await fs.unlink(tempFilePath);
} catch (error) { }
}
};
};
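
The finally block above always removes the temporary file and swallows the unlink error when the file was never written. A minimal standalone sketch of that cleanup pattern, assuming only fs/promises (the helper name and argument are illustrative):

const fs = require('fs/promises');

// Run a task against a temporary file and always try to remove it afterwards.
async function withTempFile(tempFilePath, task) {
    try {
        return await task(tempFilePath);
    } finally {
        // Ignore the error if the file was never created or is already gone
        await fs.unlink(tempFilePath).catch(() => {});
    }
}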

View File

@ -1,161 +1,110 @@
const fs = require('fs-extra');
const sharp = require('sharp');
const path = require('path');
const readChunk = require('read-chunk');
const imageType = require('image-type');
const bmp = require('bmp-js');
const gm = require('gm');
module.exports = Self => {
require('../methods/image/download')(Self);
require('../methods/image/upload')(Self);
// Function extracted from jimp package (utils)
function scan(image, x, y, w, h, f) {
// round input
x = Math.round(x);
y = Math.round(y);
w = Math.round(w);
h = Math.round(h);
for (let _y = y; _y < y + h; _y++) {
for (let _x = x; _x < x + w; _x++) {
const idx = (image.bitmap.width * _y + _x) << 2;
f.call(image, _x, _y, idx);
}
}
return image;
}
// Function extracted from jimp package (type-bmp)
function fromAGBR(bitmap) {
return scan({bitmap}, 0, 0, bitmap.width, bitmap.height, function(
x,
y,
index
) {
const alpha = this.bitmap.data[index + 0];
const blue = this.bitmap.data[index + 1];
const green = this.bitmap.data[index + 2];
const red = this.bitmap.data[index + 3];
this.bitmap.data[index + 0] = red;
this.bitmap.data[index + 1] = green;
this.bitmap.data[index + 2] = blue;
this.bitmap.data[index + 3] = bitmap.is_with_alpha ? alpha : 0xff;
}).bitmap;
}
Self.registerImage = async(collectionName, srcFilePath, fileName, entityId) => {
Self.resize = async function({collectionName, srcFile, fileName, entityId}) {
const models = Self.app.models;
const tx = await Self.beginTransaction({});
const myOptions = {transaction: tx};
try {
const collection = await models.ImageCollection.findOne({
const collection = await models.ImageCollection.findOne(
{
fields: [
'id',
'name',
'maxWidth',
'maxHeight',
'model',
'property'
'property',
],
where: {name: collectionName},
include: {
relation: 'sizes',
scope: {
fields: ['width', 'height', 'crop']
}
}
}, myOptions);
fields: ['width', 'height', 'crop'],
},
},
}
);
const data = {
// Insert image row
await models.Image.upsertWithWhere(
{
name: fileName,
collectionFk: collectionName
};
const newImage = await Self.upsertWithWhere(data, {
},
{
name: fileName,
collectionFk: collectionName,
updated: Date.vnNow()
}, myOptions);
// Resizes and saves the image
const container = await models.ImageContainer.container(collectionName);
const rootPath = container.client.root;
const collectionDir = path.join(rootPath, collectionName);
const dstDir = path.join(collectionDir, 'full');
const dstFile = path.join(dstDir, fileName);
const buffer = readChunk.sync(srcFilePath, 0, 12);
const type = imageType(buffer);
let sharpOptions;
let imgSrc = srcFilePath;
if (type.mime == 'image/bmp') {
const bmpBuffer = fs.readFileSync(srcFilePath);
const bmpData = fromAGBR(bmp.decode(bmpBuffer));
imgSrc = bmpData.data;
sharpOptions = {
raw: {
width: bmpData.width,
height: bmpData.height,
channels: 4
},
failOn: 'none'
};
updated: Date.vnNow() / 1000,
}
);
const resizeOpts = {
withoutEnlargement: true,
fit: 'inside'
};
// Update entity image file name
const model = models[collection.model];
if (!model) throw new Error('No matching model found');
await fs.mkdir(dstDir, {recursive: true});
await sharp(imgSrc, sharpOptions)
.resize(collection.maxWidth, collection.maxHeight, resizeOpts)
.png()
.toFile(dstFile);
const entity = await model.findById(entityId);
if (entity) {
await entity.updateAttribute(
collection.property,
fileName
);
}
const sizes = collection.sizes();
for (let size of sizes) {
const dstDir = path.join(collectionDir, `${size.width}x${size.height}`);
const dstFile = path.join(dstDir, fileName);
const resizeOpts = {
withoutEnlargement: true,
fit: size.crop ? 'cover' : 'inside'
};
// Resize
const container = await models.ImageContainer.container(
collectionName
);
const rootPath = container.client.root;
const collectionDir = path.join(rootPath, collectionName);
await fs.mkdir(dstDir, {recursive: true});
await sharp(imgSrc, sharpOptions)
.resize(size.width, size.height, resizeOpts)
.png()
.toFile(dstFile);
}
// To max size
const {maxWidth, maxHeight} = collection;
const fullSizePath = path.join(collectionDir, 'full');
const toFullSizePath = `${fullSizePath}/${fileName}`;
const model = models[collection.model];
await fs.mkdir(fullSizePath, {recursive: true});
await new Promise((resolve, reject) => {
gm(srcFile)
.resize(maxWidth, maxHeight, '>')
.setFormat('png')
.quality(100)
.write(toFullSizePath, function(err) {
if (err) reject(err);
if (!err) resolve();
});
});
if (!model)
throw new Error('Matching model not found');
// To collection sizes
for (const size of collection.sizes()) {
const {width, height} = size;
const item = await model.findById(entityId, null, myOptions);
if (item) {
await item.updateAttribute(
collection.property,
fileName,
myOptions
);
}
const sizePath = path.join(collectionDir, `${width}x${height}`);
const toSizePath = `${sizePath}/${fileName}`;
if (fs.existsSync(srcFilePath))
await fs.unlink(srcFilePath);
await fs.mkdir(sizePath, {recursive: true});
await new Promise((resolve, reject) => {
const gmInstance = gm(srcFile);
await tx.commit();
if (size.crop) {
gmInstance
.resize(width, height, '^')
.gravity('Center')
.crop(width, height);
}
return newImage;
} catch (e) {
await tx.rollback();
throw e;
if (!size.crop) gmInstance.resize(width, height, '>');
gmInstance
.setFormat('png')
.quality(100)
.write(toSizePath, function(err) {
if (err) reject(err);
if (!err) resolve();
});
});
}
};
};
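
The rewritten resize() above drops sharp, bmp-js and read-chunk in favour of GraphicsMagick through the gm package. A minimal sketch of the gm pattern it relies on, assuming gm and a local GraphicsMagick install (file paths are illustrative): the '>' modifier only ever shrinks the source, while '^' plus a centered crop fills the exact thumbnail box.

const gm = require('gm');

// Resize srcFile into dstFile as PNG; crop = true fills the box exactly.
function resizeToPng(srcFile, dstFile, width, height, crop) {
    return new Promise((resolve, reject) => {
        const img = gm(srcFile);
        if (crop) {
            // Cover the target box, then trim the overflow from the center
            img.resize(width, height, '^')
                .gravity('Center')
                .crop(width, height);
        } else {
            // Fit inside the box without ever enlarging the source
            img.resize(width, height, '>');
        }
        img.setFormat('png')
            .quality(100)
            .write(dstFile, err => err ? reject(err) : resolve());
    });
}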

View File

@ -0,0 +1 @@
ALTER TABLE `vn`.`ticketConfig` ADD daysForWarningClaim INT DEFAULT 2 NOT NULL COMMENT 'días restantes hasta que salte el aviso de reclamación fuera de plazo';

View File

@ -0,0 +1,74 @@
DROP TABLE `vn`.`dmsRecover`;
ALTER TABLE `vn`.`delivery` DROP FOREIGN KEY delivery_FK;
ALTER TABLE `vn`.`delivery` DROP COLUMN addressFk;
ALTER TABLE `vn`.`delivery` ADD ticketFk INT NOT NULL;
ALTER TABLE `vn`.`delivery` ADD CONSTRAINT delivery_ticketFk_FK FOREIGN KEY (`ticketFk`) REFERENCES `vn`.`ticket`(`id`);
DELETE FROM `salix`.`ACL` WHERE `property` = 'saveSign';
INSERT INTO `salix`.`ACL` (`model`,`property`,`accessType`,`permission`,`principalId`)
VALUES
('Ticket','saveSign','WRITE','ALLOW','employee');
DROP PROCEDURE IF EXISTS vn.route_getTickets;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`route_getTickets`(vRouteFk INT)
BEGIN
/**
* Pasado un RouteFk devuelve la información
* de sus tickets.
*
* @param vRouteFk
*
* @select Información de los tickets
*/
SELECT
t.id Id,
t.clientFk Client,
a.id Address,
t.packages Packages,
a.street AddressName,
a.postalCode PostalCode,
a.city City,
sub2.itemPackingTypeFk PackingType,
c.phone ClientPhone,
c.mobile ClientMobile,
a.phone AddressPhone,
a.mobile AddressMobile,
d.longitude Longitude,
d.latitude Latitude,
wm.mediaValue SalePersonPhone,
tob.Note Note,
t.isSigned Signed
FROM ticket t
JOIN client c ON t.clientFk = c.id
JOIN address a ON t.addressFk = a.id
LEFT JOIN delivery d ON t.id = d.ticketFk
LEFT JOIN workerMedia wm ON wm.workerFk = c.salesPersonFk
LEFT JOIN
(SELECT tob.description Note, t.id
FROM ticketObservation tob
JOIN ticket t ON tob.ticketFk = t.id
JOIN observationType ot ON ot.id = tob.observationTypeFk
WHERE t.routeFk = vRouteFk
AND ot.code = 'delivery'
)tob ON tob.id = t.id
LEFT JOIN
(SELECT sub.ticketFk,
CONCAT('(', GROUP_CONCAT(DISTINCT sub.itemPackingTypeFk ORDER BY sub.items DESC SEPARATOR ','), ') ') itemPackingTypeFk
FROM (SELECT s.ticketFk , i.itemPackingTypeFk, COUNT(*) items
FROM ticket t
JOIN sale s ON s.ticketFk = t.id
JOIN item i ON i.id = s.itemFk
WHERE t.routeFk = vRouteFk
GROUP BY t.id,i.itemPackingTypeFk)sub
GROUP BY sub.ticketFk
) sub2 ON sub2.ticketFk = t.id
WHERE t.routeFk = vRouteFk
GROUP BY t.id
ORDER BY t.priority;
END$$
DELIMITER ;

View File

@ -0,0 +1,67 @@
DELETE FROM `salix`.`ACL` WHERE `property` = 'saveSign';
INSERT INTO `salix`.`ACL` (`model`,`property`,`accessType`,`permission`,`principalId`)
VALUES
('Ticket','saveSign','WRITE','ALLOW','employee');
DROP PROCEDURE IF EXISTS vn.route_getTickets;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`route_getTickets`(vRouteFk INT)
BEGIN
/**
* Pasado un RouteFk devuelve la información
* de sus tickets.
*
* @param vRouteFk
*
* @select Información de los tickets
*/
SELECT
t.id Id,
t.clientFk Client,
a.id Address,
t.packages Packages,
a.street AddressName,
a.postalCode PostalCode,
a.city City,
sub2.itemPackingTypeFk PackingType,
c.phone ClientPhone,
c.mobile ClientMobile,
a.phone AddressPhone,
a.mobile AddressMobile,
d.longitude Longitude,
d.latitude Latitude,
wm.mediaValue SalePersonPhone,
tob.Note Note,
t.isSigned Signed
FROM ticket t
JOIN client c ON t.clientFk = c.id
JOIN address a ON t.addressFk = a.id
LEFT JOIN delivery d ON t.id = d.ticketFk
LEFT JOIN workerMedia wm ON wm.workerFk = c.salesPersonFk
LEFT JOIN
(SELECT tob.description Note, t.id
FROM ticketObservation tob
JOIN ticket t ON tob.ticketFk = t.id
JOIN observationType ot ON ot.id = tob.observationTypeFk
WHERE t.routeFk = vRouteFk
AND ot.code = 'delivery'
)tob ON tob.id = t.id
LEFT JOIN
(SELECT sub.ticketFk,
CONCAT('(', GROUP_CONCAT(DISTINCT sub.itemPackingTypeFk ORDER BY sub.items DESC SEPARATOR ','), ') ') itemPackingTypeFk
FROM (SELECT s.ticketFk , i.itemPackingTypeFk, COUNT(*) items
FROM ticket t
JOIN sale s ON s.ticketFk = t.id
JOIN item i ON i.id = s.itemFk
WHERE t.routeFk = vRouteFk
GROUP BY t.id,i.itemPackingTypeFk)sub
GROUP BY sub.ticketFk
) sub2 ON sub2.ticketFk = t.id
WHERE t.routeFk = vRouteFk
GROUP BY t.id
ORDER BY t.priority;
END$$
DELIMITER ;

View File

@ -0,0 +1,83 @@
CREATE TABLE `vn`.`dmsRecover` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`ticketFk` int(11) DEFAULT NULL,
`sign` text DEFAULT NULL,
`created` timestamp NULL DEFAULT current_timestamp(),
PRIMARY KEY (`id`),
KEY `ticketFk_idx` (`ticketFk`),
CONSTRAINT `ticketFk` FOREIGN KEY (`ticketFk`) REFERENCES `ticket` (`id`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB AUTO_INCREMENT=31917 DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;
ALTER TABLE `vn`.`delivery` ADD addressFk INT;
DROP PROCEDURE IF EXISTS `vn`.`route_getTickets`;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`route_getTickets`(vRouteFk INT)
BEGIN
/**
* Pasado un RouteFk devuelve la información
* de sus tickets.
*
* @param vRouteFk
* @select Información de los tickets
*/
SELECT *
FROM (
SELECT t.id Id,
t.clientFk Client,
a.id Address,
a.nickname ClientName,
t.packages Packages,
a.street AddressName,
a.postalCode PostalCode,
a.city City,
sub2.itemPackingTypeFk PackingType,
c.phone ClientPhone,
c.mobile ClientMobile,
a.phone AddressPhone,
a.mobile AddressMobile,
d.longitude Longitude,
d.latitude Latitude,
wm.mediaValue SalePersonPhone,
tob.description Note,
t.isSigned Signed,
t.priority
FROM ticket t
JOIN client c ON t.clientFk = c.id
JOIN address a ON t.addressFk = a.id
LEFT JOIN delivery d ON d.addressFk = a.id
LEFT JOIN workerMedia wm ON wm.workerFk = c.salesPersonFk
LEFT JOIN(
SELECT tob.description, t.id
FROM ticketObservation tob
JOIN ticket t ON tob.ticketFk = t.id
JOIN observationType ot ON ot.id = tob.observationTypeFk
WHERE t.routeFk = vRouteFk
AND ot.code = 'delivery'
)tob ON tob.id = t.id
LEFT JOIN(
SELECT sub.ticketFk,
CONCAT('(',
GROUP_CONCAT(DISTINCT sub.itemPackingTypeFk
ORDER BY sub.items DESC SEPARATOR ','),
') ') itemPackingTypeFk
FROM (
SELECT s.ticketFk, i.itemPackingTypeFk, COUNT(*) items
FROM ticket t
JOIN sale s ON s.ticketFk = t.id
JOIN item i ON i.id = s.itemFk
WHERE t.routeFk = vRouteFk
GROUP BY t.id, i.itemPackingTypeFk
)sub
GROUP BY sub.ticketFk
)sub2 ON sub2.ticketFk = t.id
WHERE t.routeFk = vRouteFk
ORDER BY d.id DESC
LIMIT 10000000000000000000
)sub3
GROUP BY sub3.id
ORDER BY sub3.priority;
END$$
DELIMITER ;

View File

@ -0,0 +1 @@
DROP TRIGGER IF EXISTS `vn`.`claimBeginning_afterInsert`;

View File

@ -0,0 +1,70 @@
DROP TABLE IF EXISTS `vn`.`dmsRecover`;
ALTER TABLE `vn`.`delivery` DROP COLUMN addressFk;
ALTER TABLE `vn`.`delivery` DROP CONSTRAINT delivery_ticketFk_FK;
ALTER TABLE `vn`.`delivery` DROP COLUMN ticketFk;
ALTER TABLE `vn`.`delivery` ADD ticketFk INT DEFAULT NULL;
ALTER TABLE `vn`.`delivery` ADD CONSTRAINT delivery_ticketFk_FK FOREIGN KEY (`ticketFk`) REFERENCES `vn`.`ticket`(`id`);
DROP PROCEDURE IF EXISTS vn.route_getTickets;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`route_getTickets`(vRouteFk INT)
BEGIN
/**
* Pasado un RouteFk devuelve la información
* de sus tickets.
*
* @param vRouteFk
*
* @select Información de los tickets
*/
SELECT
t.id Id,
t.clientFk Client,
a.id Address,
t.packages Packages,
a.street AddressName,
a.postalCode PostalCode,
a.city City,
sub2.itemPackingTypeFk PackingType,
c.phone ClientPhone,
c.mobile ClientMobile,
a.phone AddressPhone,
a.mobile AddressMobile,
d.longitude Longitude,
d.latitude Latitude,
wm.mediaValue SalePersonPhone,
tob.Note Note,
t.isSigned Signed
FROM ticket t
JOIN client c ON t.clientFk = c.id
JOIN address a ON t.addressFk = a.id
LEFT JOIN delivery d ON t.id = d.ticketFk
LEFT JOIN workerMedia wm ON wm.workerFk = c.salesPersonFk
LEFT JOIN
(SELECT tob.description Note, t.id
FROM ticketObservation tob
JOIN ticket t ON tob.ticketFk = t.id
JOIN observationType ot ON ot.id = tob.observationTypeFk
WHERE t.routeFk = vRouteFk
AND ot.code = 'delivery'
)tob ON tob.id = t.id
LEFT JOIN
(SELECT sub.ticketFk,
CONCAT('(', GROUP_CONCAT(DISTINCT sub.itemPackingTypeFk ORDER BY sub.items DESC SEPARATOR ','), ') ') itemPackingTypeFk
FROM (SELECT s.ticketFk , i.itemPackingTypeFk, COUNT(*) items
FROM ticket t
JOIN sale s ON s.ticketFk = t.id
JOIN item i ON i.id = s.itemFk
WHERE t.routeFk = vRouteFk
GROUP BY t.id,i.itemPackingTypeFk)sub
GROUP BY sub.ticketFk
) sub2 ON sub2.ticketFk = t.id
WHERE t.routeFk = vRouteFk
GROUP BY t.id
ORDER BY t.priority;
END$$
DELIMITER ;

View File

View File

@ -1774,12 +1774,12 @@ INSERT INTO `vn`.`claimState`(`id`, `code`, `description`, `roleFk`, `priority`,
( 6, 'mana', 'Mana', 72, 4, 0),
( 7, 'lack', 'Faltas', 72, 2, 0);
INSERT INTO `vn`.`claim`(`id`, `ticketCreated`, `claimStateFk`, `clientFk`, `workerFk`, `responsibility`, `isChargedToMana`, `created`, `packages`, `rma`)
INSERT INTO `vn`.`claim`(`id`, `ticketCreated`, `claimStateFk`, `clientFk`, `workerFk`, `responsibility`, `isChargedToMana`, `created`, `packages`, `rma`, `ticketFk`)
VALUES
(1, util.VN_CURDATE(), 1, 1101, 18, 3, 0, util.VN_CURDATE(), 0, '02676A049183'),
(2, util.VN_CURDATE(), 2, 1101, 18, 3, 0, util.VN_CURDATE(), 1, NULL),
(3, util.VN_CURDATE(), 3, 1101, 18, 1, 1, util.VN_CURDATE(), 5, NULL),
(4, util.VN_CURDATE(), 3, 1104, 18, 5, 0, util.VN_CURDATE(), 10, NULL);
(1, util.VN_CURDATE(), 1, 1101, 18, 3, 0, util.VN_CURDATE(), 0, '02676A049183', 11),
(2, util.VN_CURDATE(), 2, 1101, 18, 3, 0, util.VN_CURDATE(), 1, NULL, 16),
(3, util.VN_CURDATE(), 3, 1101, 18, 1, 1, util.VN_CURDATE(), 5, NULL, 7),
(4, util.VN_CURDATE(), 3, 1104, 18, 5, 0, util.VN_CURDATE(), 10, NULL, 8);
INSERT INTO `vn`.`claimObservation` (`claimFk`, `workerFk`, `text`, `created`)
VALUES

View File

@ -28,22 +28,4 @@ describe('Client log path', () => {
it('should navigate to the log section', async() => {
await page.accessToSection('client.card.log');
});
it('should check the previous value of the last logged change', async() => {
let lastModificationPreviousValue = await page
.waitToGetProperty(selectors.clientLog.lastModificationPreviousValue, 'innerText');
expect(lastModificationPreviousValue).toContain('DavidCharlesHaller');
});
it('should check the current value of the last logged change', async() => {
let lastModificationPreviousValue = await page
.waitToGetProperty(selectors.clientLog.lastModificationPreviousValue, 'innerText');
let lastModificationCurrentValue = await page.
waitToGetProperty(selectors.clientLog.lastModificationCurrentValue, 'innerText');
expect(lastModificationPreviousValue).toEqual('DavidCharlesHaller');
expect(lastModificationCurrentValue).toEqual('this is a test');
});
});

View File

@ -42,23 +42,4 @@ describe('Item log path', () => {
await page.waitForSelector(selectors.itemsIndex.createItemButton);
await page.waitForState('item.index');
});
it(`should search for the created item and navigate to it's log section`, async() => {
await page.accessToSearchResult('Knowledge artifact');
await page.accessToSection('item.card.log');
});
it(`should confirm the log is showing 4 entries`, async() => {
await page.waitForSelector(selectors.itemLog.anyLineCreated);
const anyLineCreatedCount = await page.countElement(selectors.itemLog.anyLineCreated);
expect(anyLineCreatedCount).toEqual(4);
});
xit(`should confirm the log is showing the intrastat for the created item`, async() => {
const fifthLineCreatedProperty = await page
.waitToGetProperty(selectors.itemLog.fifthLineCreatedProperty, 'innerText');
expect(fifthLineCreatedProperty).toEqual('05080000');
});
});

View File

@ -29,20 +29,4 @@ describe('Ticket expeditions and log path', () => {
expect(result).toEqual(3);
});
it(`should confirm the expedition deleted is shown now in the ticket log`, async() => {
await page.accessToSection('ticket.card.log');
const user = await page
.waitToGetProperty(selectors.ticketLog.user, 'innerText');
const action = await page
.waitToGetProperty(selectors.ticketLog.action, 'innerText');
const id = await page
.waitToGetProperty(selectors.ticketLog.id, 'innerText');
expect(user).toContain('production');
expect(action).toContain('Deletes');
expect(id).toEqual('2');
});
});

View File

@ -31,30 +31,4 @@ describe('Ticket log path', () => {
expect(message.text).toContain('Data saved!');
});
it('should navigate to the log section', async() => {
await page.accessToSection('ticket.card.log');
});
it('should set the viewport width to 1920 to see the table full width', async() => {
await page.setViewport({
width: 1920,
height: 0,
});
const result = await page.waitToGetProperty(selectors.ticketLog.firstTD, 'innerText');
expect(result.length).not.toBeGreaterThan('20');
});
it('should set the viewport width to 800 to see the table shrink and move data to the 1st column', async() => {
await page.setViewport({
width: 800,
height: 0,
});
const result = await page.waitToGetProperty(selectors.ticketLog.firstTD, 'innerText');
expect(result.length).toBeGreaterThan('15');
});
});

View File

@ -29,14 +29,4 @@ describe('Zone descriptor path', () => {
expect(count).toEqual(0);
});
it('should check the ticket whom lost the zone and see evidence on the logs', async() => {
await page.waitToClick(selectors.globalItems.homeButton);
await page.selectModule('ticket');
await page.accessToSearchResult('20');
await page.accessToSection('ticket.card.log');
const lastChanges = await page.waitToGetProperty(selectors.ticketLog.changes, 'innerText');
expect(lastChanges).toContain('1');
});
});

View File

@ -64,14 +64,4 @@ describe('Supplier basic data path', () => {
expect(result).toEqual('Some notes');
});
it('should navigate to the log section', async() => {
await page.accessToSection('supplier.card.log');
});
it('should check the changes have been recorded', async() => {
const result = await page.waitToGetProperty('vn-tr table tr:nth-child(3) td.after', 'innerText');
expect(result).toEqual('Some notes');
});
});

View File

@ -162,14 +162,8 @@ export default class UploadPhoto extends Component {
if (!this.newPhoto.files)
throw new Error(`Select an image`);
const viewportType = this.viewportSelection;
const output = viewportType.output;
const options = {
type: 'blob',
size: {
width: output.width,
height: output.height
}
};
return this.editor.result(options)
.then(blob => this.newPhoto.blob = blob)

View File

@ -1,6 +1,20 @@
const app = require('vn-loopback/server/server');
const LoopBackContext = require('loopback-context');
describe('Model crud()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
let insertId;
const barcodeModel = app.models.ItemBarcode;

View File

@ -1,6 +1,21 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('Model rewriteDbError()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
it('should extend rewriteDbError properties to any model passed', () => {
const exampleModel = models.ItemTag;

View File

@ -274,5 +274,6 @@
"This ticket cannot be signed because it has not been boxed": "Este ticket no puede firmarse porque no ha sido encajado",
"Insert a date range": "Inserte un rango de fechas",
"Added observation": "{{user}} añadió esta observacion: {{text}}",
"Comment added to client": "Observación añadida al cliente {{clientFk}}"
"Comment added to client": "Observación añadida al cliente {{clientFk}}",
"Cannot create a new claimBeginning from a different ticket": "No se puede crear una línea de reclamación de un ticket diferente al origen"
}

View File

@ -1,41 +1,9 @@
const mysql = require('mysql');
const MySQL = require('loopback-connector-mysql').MySQL;
const EnumFactory = require('loopback-connector-mysql').EnumFactory;
const { Transaction, SQLConnector, ParameterizedSQL } = require('loopback-connector');
const {Transaction, SQLConnector, ParameterizedSQL} = require('loopback-connector');
const fs = require('fs');
const limitSet = new Set([
'save',
'updateOrCreate',
'replaceOrCreate',
'replaceById',
'update'
]);
const opOpts = {
update: [
'update',
'replaceById',
// |insert
'save',
'updateOrCreate',
'replaceOrCreate'
],
delete: [
'destroy',
'destroyAll'
],
insert: [
'create'
]
};
const opMap = new Map();
for (const op in opOpts) {
for (const met of opOpts[op])
opMap.set(met, op);
}
class VnMySQL extends MySQL {
/**
* Promisified version of execute().
@ -253,49 +221,49 @@ class VnMySQL extends MySQL {
}
create(model, data, opts, cb) {
const ctx = { data };
const ctx = {data};
this.invokeMethod('create',
arguments, model, ctx, opts, cb);
}
createAll(model, data, opts, cb) {
const ctx = { data };
const ctx = {data};
this.invokeMethod('createAll',
arguments, model, ctx, opts, cb);
}
save(model, data, opts, cb) {
const ctx = { data };
const ctx = {data};
this.invokeMethod('save',
arguments, model, ctx, opts, cb);
}
updateOrCreate(model, data, opts, cb) {
const ctx = { data };
const ctx = {data};
this.invokeMethod('updateOrCreate',
arguments, model, ctx, opts, cb);
}
replaceOrCreate(model, data, opts, cb) {
const ctx = { data };
const ctx = {data};
this.invokeMethod('replaceOrCreate',
arguments, model, ctx, opts, cb);
}
destroyAll(model, where, opts, cb) {
const ctx = { where };
const ctx = {where};
this.invokeMethod('destroyAll',
arguments, model, ctx, opts, cb);
}
update(model, where, data, opts, cb) {
const ctx = { where, data };
const ctx = {where, data};
this.invokeMethod('update',
arguments, model, ctx, opts, cb);
}
replaceById(model, id, data, opts, cb) {
const ctx = { id, data };
const ctx = {id, data};
this.invokeMethod('replaceById',
arguments, model, ctx, opts, cb);
}
@ -311,91 +279,34 @@ class VnMySQL extends MySQL {
return super[method].apply(this, args);
this.invokeMethodP(method, [...args], model, ctx, opts)
.then(res => cb(...res), cb);
.then(res => cb(...[null].concat(res)), cb);
}
async invokeMethodP(method, args, model, ctx, opts) {
const Model = this.getModelDefinition(model).model;
const settings = Model.definition.settings;
let tx;
if (!opts.transaction) {
tx = await Transaction.begin(this, {});
opts = Object.assign({ transaction: tx, httpCtx: opts.httpCtx }, opts);
opts = Object.assign({transaction: tx, httpCtx: opts.httpCtx}, opts);
}
try {
// Fetch old values (update|delete) or login
let where, id, data, idName, limit, op, oldInstances, newInstances;
const hasGrabUser = settings.log && settings.log.grabUser;
if (hasGrabUser) {
const userId = opts.httpCtx && opts.httpCtx.active.accessToken.userId;
const user = await Model.app.models.Account.findById(userId, { fields: ['name'] }, opts);
const userId = opts.httpCtx && opts.httpCtx.active.accessToken.userId;
if (userId) {
const user = await Model.app.models.Account.findById(userId, {fields: ['name']}, opts);
await this.executeP(`CALL account.myUser_loginWithName(?)`, [user.name], opts);
}
else {
where = ctx.where;
id = ctx.id;
data = ctx.data;
idName = this.idName(model);
limit = limitSet.has(method);
op = opMap.get(method);
if (!where) {
if (id) where = { [idName]: id };
else where = { [idName]: data[idName] };
}
// Fetch old values
switch (op) {
case 'update':
case 'delete':
// Single entity operation
const stmt = this.buildSelectStmt(op, data, idName, model, where, limit);
stmt.merge(`FOR UPDATE`);
oldInstances = await this.executeStmt(stmt, opts);
}
}
const res = await new Promise(resolve => {
const res = await new Promise((resolve, reject) => {
const fnArgs = args.slice(0, -2);
fnArgs.push(opts, (...args) => resolve(args));
fnArgs.push(opts, (err, ...args) => {
if (err) return reject(err);
resolve(args);
});
super[method].apply(this, fnArgs);
});
if (hasGrabUser)
await this.executeP(`CALL account.myUser_logout()`, null, opts);
else {
// Fetch new values
const ids = [];
switch (op) {
case 'insert':
case 'update': {
switch (method) {
case 'createAll':
for (const row of res[1])
ids.push(row[idName]);
break;
case 'create':
ids.push(res[1]);
break;
case 'update':
if (data[idName] != null)
ids.push(data[idName]);
break;
}
const newWhere = ids.length ? { [idName]: ids } : where;
const stmt = this.buildSelectStmt(op, data, idName, model, newWhere, limit);
newInstances = await this.executeStmt(stmt, opts);
}
}
await this.createLogRecord(oldInstances, newInstances, model, opts);
}
if (userId) await this.executeP(`CALL account.myUser_logout()`, null, opts);
if (tx) await tx.commit();
return res;
} catch (err) {
@ -403,125 +314,6 @@ class VnMySQL extends MySQL {
throw err;
}
}
buildSelectStmt(op, data, idName, model, where, limit) {
const Model = this.getModelDefinition(model).model;
const properties = Object.keys(Model.definition.properties);
const fields = data ? Object.keys(data) : [];
if (op == 'delete')
properties.forEach(property => fields.push(property));
else {
const log = Model.definition.settings.log;
fields.push(idName);
if (log.relation) fields.push(Model.relations[log.relation].keyFrom);
if (log.showField) fields.push(log.showField);
else {
const showFieldNames = ['name', 'description', 'code', 'nickname'];
for (const field of showFieldNames) {
if (properties.includes(field)) {
log.showField = field;
fields.push(field);
break;
}
}
}
}
const stmt = new ParameterizedSQL(
'SELECT ' +
this.buildColumnNames(model, { fields }) +
' FROM ' +
this.tableEscaped(model)
);
stmt.merge(this.buildWhere(model, where));
if (limit) stmt.merge(`LIMIT 1`);
return stmt;
}
async createLogRecord(oldInstances, newInstances, model, opts) {
function setActionType() {
if (oldInstances && newInstances)
return 'update';
else if (!oldInstances && newInstances)
return 'insert';
return 'delete';
}
const action = setActionType();
if (!newInstances && action != 'delete') return;
const Model = this.getModelDefinition(model).model;
const models = Model.app.models;
const definition = Model.definition;
const log = definition.settings.log;
const primaryKey = this.idName(model);
const originRelation = log.relation;
const originFkField = originRelation
? Model.relations[originRelation].keyFrom
: primaryKey;
// Prevent adding logs when deleting a principal entity (Client, Zone...)
if (action == 'delete' && !originRelation) return;
function map(instances) {
const map = new Map();
if (!instances) return;
for (const instance of instances)
map.set(instance[primaryKey], instance);
return map;
}
const changedModel = definition.name;
const userFk = opts.httpCtx && opts.httpCtx.active.accessToken.userId;
const oldMap = map(oldInstances);
const newMap = map(newInstances);
const ids = (oldMap || newMap).keys();
const logEntries = [];
function insertValuesLogEntry(logEntry, instance) {
logEntry.originFk = instance[originFkField];
logEntry.changedModelId = instance[primaryKey];
if (log.showField) logEntry.changedModelValue = instance[log.showField];
}
for (const id of ids) {
const oldI = oldMap && oldMap.get(id);
const newI = newMap && newMap.get(id);
const logEntry = {
action,
userFk,
changedModel,
};
if (newI) {
insertValuesLogEntry(logEntry, newI);
// Delete unchanged properties
if (oldI) {
Object.keys(oldI).forEach(prop => {
const hasChanges = oldI[prop] instanceof Date ?
oldI[prop]?.getTime() != newI[prop]?.getTime() :
oldI[prop] != newI[prop];
if (!hasChanges) {
delete oldI[prop];
delete newI[prop];
}
});
}
} else
insertValuesLogEntry(logEntry, oldI);
logEntry.oldInstance = oldI;
logEntry.newInstance = newI;
logEntries.push(logEntry);
}
await models[log.model].create(logEntries, opts);
}
}
exports.VnMySQL = VnMySQL;
@ -542,7 +334,7 @@ exports.initialize = function initialize(dataSource, callback) {
if (callback) {
if (dataSource.settings.lazyConnect) {
process.nextTick(function () {
process.nextTick(function() {
callback();
});
} else
@ -550,13 +342,13 @@ exports.initialize = function initialize(dataSource, callback) {
}
};
MySQL.prototype.connect = function (callback) {
MySQL.prototype.connect = function(callback) {
const self = this;
const options = generateOptions(this.settings);
if (this.client) {
if (callback) {
process.nextTick(function () {
process.nextTick(function() {
callback(null, self.client);
});
}
@ -565,7 +357,7 @@ MySQL.prototype.connect = function (callback) {
function connectionHandler(options, callback) {
const client = mysql.createPool(options);
client.getConnection(function (err, connection) {
client.getConnection(function(err, connection) {
const conn = connection;
if (!err) {
if (self.debug)
@ -645,30 +437,27 @@ function generateOptions(settings) {
return options;
}
SQLConnector.prototype.all = function find(model, filter, options, cb) {
const self = this;
// Order by id if no order is specified
filter = filter || {};
const stmt = this.buildSelect(model, filter, options);
this.execute(stmt.sql, stmt.params, options, function (err, data) {
if (err) {
this.execute(stmt.sql, stmt.params, options, function(err, data) {
if (err)
return cb(err, []);
}
try {
const objs = data.map(function (obj) {
const objs = data.map(function(obj) {
return self.fromRow(model, obj);
});
if (filter && filter.include) {
self.getModelDefinition(model).model.include(
objs, filter.include, options, cb,
);
} else {
} else
cb(null, objs);
}
} catch (error) {
cb(error, [])
cb(error, []);
}
});
};
};
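
Two behaviour fixes stand out in the connector above: the wrapped promise now rejects when the driver callback reports an error, and the node-style callback gets that error back as its first argument (cb(...[null].concat(res)) on success). A minimal standalone sketch of that bridge, with illustrative names:

// Wrap a node-style async function so its results become a promise.
function invokeP(fn, args) {
    return new Promise((resolve, reject) => {
        fn(...args, (err, ...results) => {
            if (err) return reject(err);
            resolve(results);
        });
    });
}

// Bridge back to a node-style callback: error first, then the results.
function invoke(fn, args, cb) {
    invokeP(fn, args)
        .then(res => cb(...[null].concat(res)), cb);
}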

View File

@ -2,9 +2,9 @@ const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('Claim createFromSales()', () => {
const ticketId = 16;
const ticketId = 23;
const newSale = [{
id: 3,
id: 31,
instance: 0,
quantity: 10
}];

View File

@ -1,6 +1,20 @@
const app = require('vn-loopback/server/server');
const LoopBackContext = require('loopback-context');
describe('Update Claim', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
const newDate = Date.vnNew();
const originalData = {
ticketFk: 3,

View File

@ -1,6 +1,20 @@
const app = require('vn-loopback/server/server');
const LoopBackContext = require('loopback-context');
describe('Update Claim', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
const newDate = Date.vnNew();
const original = {
ticketFk: 3,

View File

@ -10,8 +10,16 @@ module.exports = Self => {
});
Self.observe('before save', async ctx => {
if (ctx.isNewInstance) return;
//await claimIsEditable(ctx);
if (ctx.isNewInstance) {
const models = Self.app.models;
const options = ctx.options;
const instance = ctx.instance;
const ticket = await models.Sale.findById(instance.saleFk, {fields: ['ticketFk']}, options);
const claim = await models.Claim.findById(instance.claimFk, {fields: ['ticketFk']}, options);
if (ticket.ticketFk != claim.ticketFk)
throw new UserError(`Cannot create a new claimBeginning from a different ticket`);
}
// await claimIsEditable(ctx);
});
Self.observe('before delete', async ctx => {

View File

@ -1,11 +1,6 @@
{
"name": "ClaimBeginning",
"base": "Loggable",
"log": {
"model": "ClaimLog",
"relation": "claim",
"showField": "quantity"
},
"options": {
"mysql": {
"table": "claimBeginning"

View File

@ -1,10 +1,6 @@
{
"name": "ClaimDevelopment",
"base": "Loggable",
"log": {
"model": "ClaimLog",
"relation": "claim"
},
"options": {
"mysql": {
"table": "claimDevelopment"

View File

@ -1,18 +1,14 @@
{
"name": "ClaimDms",
"base": "Loggable",
"log": {
"model": "ClaimLog",
"relation": "claim"
},
"options": {
"mysql": {
"table": "claimDms"
}
},
"allowedContentTypes": [
"image/png",
"image/jpeg",
"image/png",
"image/jpeg",
"image/jpg"
],
"properties": {
@ -34,4 +30,4 @@
"foreignKey": "dmsFk"
}
}
}
}

View File

@ -1,10 +1,6 @@
{
"name": "ClaimEnd",
"base": "Loggable",
"log": {
"model": "ClaimLog",
"relation": "claim"
},
"options": {
"mysql": {
"table": "claimEnd"

View File

@ -1,10 +1,6 @@
{
"name": "ClaimObservation",
"base": "Loggable",
"log": {
"model": "ClaimLog",
"relation": "claim"
},
"options": {
"mysql": {
"table": "claimObservation"
@ -40,4 +36,4 @@
"foreignKey": "claimFk"
}
}
}
}

View File

@ -1,11 +1,6 @@
{
"name": "ClaimState",
"base": "Loggable",
"log": {
"model": "ClaimLog",
"relation": "claim",
"showField": "description"
},
"options": {
"mysql": {
"table": "claimState"

View File

@ -1,10 +1,6 @@
{
"name": "Claim",
"base": "Loggable",
"log": {
"model": "ClaimLog",
"showField": "id"
},
"options": {
"mysql": {
"table": "claim"

View File

@ -2,11 +2,6 @@
"name": "Address",
"description": "Client addresses",
"base": "Loggable",
"log": {
"model": "ClientLog",
"relation": "client",
"showField": "nickname"
},
"options": {
"mysql": {
"table": "address"
@ -88,4 +83,4 @@
"foreignKey": "customsAgentFk"
}
}
}
}

View File

@ -2,11 +2,6 @@
"name": "ClientContact",
"description": "Client phone contacts",
"base": "Loggable",
"log": {
"model": "ClientLog",
"relation": "client",
"showField": "name"
},
"options": {
"mysql": {
"table": "clientContact"
@ -33,4 +28,4 @@
"foreignKey": "clientFk"
}
}
}
}

View File

@ -1,11 +1,6 @@
{
"name": "ClientDms",
"base": "Loggable",
"log": {
"model":"ClientLog",
"relation": "client",
"showField": "dmsFk"
},
"options": {
"mysql": {
"table": "clientDms"

View File

@ -2,10 +2,6 @@
"name": "ClientObservation",
"description": "Client notes",
"base": "Loggable",
"log": {
"model": "ClientLog",
"relation": "client"
},
"options": {
"mysql": {
"table": "clientObservation"

View File

@ -1,11 +1,6 @@
{
"name": "ClientSample",
"base": "Loggable",
"log": {
"model": "ClientLog",
"relation": "client",
"showField": "type"
},
"options": {
"mysql": {
"table": "clientSample"

View File

@ -1,10 +1,6 @@
{
"name": "Client",
"base": "Loggable",
"log": {
"model":"ClientLog",
"showField": "id"
},
"options": {
"mysql": {
"table": "client"
@ -260,4 +256,4 @@
}
}
}
}
}

View File

@ -1,11 +1,6 @@
{
"name": "Greuge",
"base": "Loggable",
"log": {
"model": "ClientLog",
"relation": "client",
"showField": "description"
},
"options": {
"mysql": {
"table": "greuge"
@ -58,4 +53,4 @@
"foreignKey": "userFk"
}
}
}
}

View File

@ -1,10 +1,6 @@
{
"name": "Recovery",
"base": "Loggable",
"log": {
"model": "ClientLog",
"relation": "client"
},
"options": {
"mysql": {
"table": "recovery"
@ -38,4 +34,4 @@
"foreignKey": "clientFk"
}
}
}
}

View File

@ -1,11 +1,6 @@
{
"name": "Buy",
"base": "Loggable",
"log": {
"model": "EntryLog",
"relation": "entry",
"grabUser": true
},
"options": {
"mysql": {
"table": "buy"

View File

@ -1,10 +1,6 @@
{
"name": "EntryObservation",
"base": "Loggable",
"log": {
"model": "EntryLog",
"relation": "entry"
},
"options": {
"mysql": {
"table": "entryObservation"

View File

@ -1,10 +1,6 @@
{
"name": "Entry",
"base": "Loggable",
"log": {
"model":"EntryLog",
"grabUser": true
},
"options": {
"mysql": {
"table": "entry"

View File

@ -1,6 +1,21 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('invoiceIn clone()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
it('should return the cloned invoiceIn and also clone invoiceInDueDays and invoiceInTaxes if there are any referencing the invoiceIn', async() => {
const userId = 1;
const ctx = {

View File

@ -1,10 +1,6 @@
{
"name": "InvoiceInTax",
"base": "Loggable",
"log": {
"model": "InvoiceInLog",
"relation": "invoiceIn"
},
"options": {
"mysql": {
"table": "invoiceInTax"
@ -55,4 +51,4 @@
"foreignKey": "transactionTypeSageFk"
}
}
}
}

View File

@ -1,9 +1,6 @@
{
"name": "InvoiceIn",
"base": "Loggable",
"log": {
"model": "InvoiceInLog"
},
"options": {
"mysql": {
"table": "invoiceIn"

View File

@ -56,7 +56,7 @@ module.exports = Self => {
reference: invoiceOut.ref,
recipientId: invoiceOut.clientFk
});
const stream = await invoiceReport.toPdfStream();
const buffer = await invoiceReport.toPdfStream();
const issued = invoiceOut.issued;
const year = issued.getFullYear().toString();
@ -66,7 +66,7 @@ module.exports = Self => {
const fileName = `${year}${invoiceOut.ref}.pdf`;
// Store invoice
print.storage.write(stream, {
await print.storage.write(buffer, {
type: 'invoice',
path: `${year}/${month}/${day}`,
fileName: fileName

View File

@ -100,16 +100,23 @@ class Controller extends Section {
};
this.$http.post(`InvoiceOuts/invoiceClient`, params)
.then(() => this.invoiceNext())
.catch(res => {
this.errors.unshift({
address,
message: res.data.error.message
});
const message = res.data?.error?.message || res.message;
if (res.status >= 400 && res.status < 500) {
this.errors.unshift({address, message});
this.invoiceNext();
} else {
this.invoicing = false;
this.status = 'done';
throw new UserError(`Critical invoicing error, proccess stopped`);
}
})
.finally(() => {
this.addressIndex++;
this.invoiceOut();
});
}
invoiceNext() {
this.addressIndex++;
this.invoiceOut();
}
get nAddresses() {

View File

@ -17,4 +17,5 @@ Ended process: Proceso finalizado
Invoice out: Facturar
One client: Un solo cliente
Choose a valid client: Selecciona un cliente válido
Stop: Parar
Stop: Parar
Critical invoicing error, proccess stopped: Error crítico al facturar, proceso detenido

View File

@ -1,9 +1,7 @@
const axios = require('axios');
const uuid = require('uuid');
const fs = require('fs/promises');
const {createWriteStream} = require('fs');
const path = require('path');
const gm = require('gm');
module.exports = Self => {
Self.remoteMethod('download', {
@ -27,13 +25,9 @@ module.exports = Self => {
const maxAttempts = 3;
const collectionName = 'catalog';
const tx = await Self.beginTransaction({});
let tempFilePath;
let queueRow;
try {
const myOptions = {transaction: tx};
queueRow = await Self.findOne(
{
fields: ['id', 'itemFk', 'url', 'attempts'],
@ -44,58 +38,14 @@ module.exports = Self => {
},
},
order: 'priority, attempts, updated',
},
myOptions
}
);
if (!queueRow) return;
const collection = await models.ImageCollection.findOne(
{
fields: [
'id',
'maxWidth',
'maxHeight',
'model',
'property',
],
where: {name: collectionName},
include: {
relation: 'sizes',
scope: {
fields: ['width', 'height', 'crop'],
},
},
},
myOptions
);
const fileName = `${uuid.v4()}.png`;
const fileName = `${queueRow.itemFk}.png`;
tempFilePath = path.join(tempPath, fileName);
// Insert image row
await models.Image.create(
{
name: fileName,
collectionFk: collectionName,
updated: Date.vnNow(),
},
myOptions
);
// Update item
const model = models[collection.model];
if (!model) throw new Error('No matching model found');
const item = await model.findById(queueRow.itemFk, null, myOptions);
if (item) {
await item.updateAttribute(
collection.property,
fileName,
myOptions
);
}
// Download remote image
const response = await axios.get(queueRow.url, {
responseType: 'stream',
@ -108,71 +58,22 @@ module.exports = Self => {
writeStream.on('error', error => reject(error));
});
// Resize
const container = await models.ImageContainer.container(
collectionName
);
const rootPath = container.client.root;
const collectionDir = path.join(rootPath, collectionName);
// To max size
const {maxWidth, maxHeight} = collection;
const fullSizePath = path.join(collectionDir, 'full');
const toFullSizePath = `${fullSizePath}/${fileName}`;
await fs.mkdir(fullSizePath, {recursive: true});
await new Promise((resolve, reject) => {
gm(tempFilePath)
.resize(maxWidth, maxHeight, '>')
.setFormat('png')
.write(toFullSizePath, function(err) {
if (err) reject(err);
if (!err) resolve();
});
await models.Image.resize({
collectionName: collectionName,
srcFile: tempFilePath,
fileName: fileName,
entityId: queueRow.itemFk
});
// To collection sizes
for (const size of collection.sizes()) {
const {width, height} = size;
const sizePath = path.join(collectionDir, `${width}x${height}`);
const toSizePath = `${sizePath}/${fileName}`;
await fs.mkdir(sizePath, {recursive: true});
await new Promise((resolve, reject) => {
const gmInstance = gm(tempFilePath);
if (size.crop) {
gmInstance
.resize(width, height, '^')
.gravity('Center')
.crop(width, height);
}
if (!size.crop) gmInstance.resize(width, height, '>');
gmInstance
.setFormat('png')
.write(toSizePath, function(err) {
if (err) reject(err);
if (!err) resolve();
});
});
}
try {
await fs.unlink(tempFilePath);
} catch (error) { }
await queueRow.destroy(myOptions);
await queueRow.destroy();
// Restart queue
Self.download();
await tx.commit();
} catch (error) {
await tx.rollback();
if (queueRow.attempts < maxAttempts) {
await queueRow.updateAttributes({
error: error,
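
The queue above now downloads the remote image to a temporary file and hands it to Image.resize instead of resizing inline. A minimal sketch of the stream download it performs, assuming axios and the core fs module (URL and path are illustrative):

const axios = require('axios');
const {createWriteStream} = require('fs');

// Stream a remote file to disk and resolve once it is fully written.
async function downloadToFile(url, filePath) {
    const response = await axios.get(url, {responseType: 'stream'});
    await new Promise((resolve, reject) => {
        const writeStream = createWriteStream(filePath);
        response.data.pipe(writeStream);
        writeStream.on('finish', resolve);
        writeStream.on('error', reject);
    });
}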

View File

@ -1,6 +1,21 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('item updateTaxes()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
it('should throw an error if the taxClassFk is blank', async() => {
const tx = await models.Item.beginTransaction({});
const options = {transaction: tx};

View File

@ -1,6 +1,21 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('tag onSubmit()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
it('should delete a tag', async() => {
const tx = await models.Item.beginTransaction({});
const options = {transaction: tx};

View File

@ -1,11 +1,6 @@
{
"name": "ItemBarcode",
"base": "Loggable",
"log": {
"model": "ItemLog",
"relation": "item",
"showField": "code"
},
"options": {
"mysql": {
"table": "itemBarcode"
@ -27,6 +22,6 @@
"type": "belongsTo",
"model": "Item",
"foreignKey": "itemFk"
}
}
}
}

View File

@ -1,10 +1,6 @@
{
"name": "ItemBotanical",
"base": "Loggable",
"log": {
"model": "ItemLog",
"relation": "item"
},
"options": {
"mysql": {
"table": "itemBotanical"
@ -34,4 +30,4 @@
"foreignKey": "specieFk"
}
}
}
}

View File

@ -1,11 +1,6 @@
{
"name": "ItemTag",
"base": "Loggable",
"log": {
"model": "ItemLog",
"relation": "item",
"showField": "value"
},
"options": {
"mysql": {
"table": "itemTag"

View File

@ -1,11 +1,6 @@
{
"name": "ItemTaxCountry",
"base": "Loggable",
"log": {
"model": "ItemLog",
"relation": "item",
"showField": "countryFk"
},
"options": {
"mysql": {
"table": "itemTaxCountry"
@ -47,4 +42,4 @@
"foreignKey": "taxClassFk"
}
}
}
}

View File

@ -1,11 +1,6 @@
{
"name": "Item",
"base": "Loggable",
"log": {
"model": "ItemLog",
"showField": "id",
"grabUser": true
},
"options": {
"mysql": {
"table": "item"

View File

@ -1,6 +1,20 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('AgencyTerm createInvoiceIn()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
const rows = [
{
routeFk: 2,

View File

@ -1,10 +1,6 @@
{
"name": "Route",
"base": "Loggable",
"log": {
"model":"RouteLog",
"grabUser": true
},
"options": {
"mysql": {
"table": "route"

View File

@ -1,10 +1,6 @@
{
"name": "Shelving",
"base": "Loggable",
"log": {
"model": "ShelvingLog",
"showField": "id"
},
"options": {
"mysql": {
"table": "shelving"

View File

@ -1,10 +1,6 @@
{
"name": "SupplierAccount",
"base": "Loggable",
"log": {
"model":"SupplierLog",
"relation": "supplier"
},
"options": {
"mysql": {
"table": "supplierAccount"
@ -35,4 +31,4 @@
"foreignKey": "bankEntityFk"
}
}
}
}

View File

@ -2,11 +2,6 @@
"name": "SupplierAddress",
"description": "Supplier addresses",
"base": "Loggable",
"log": {
"model": "SupplierLog",
"relation": "supplier",
"showField": "name"
},
"options": {
"mysql": {
"table": "supplierAddress"
@ -52,4 +47,4 @@
"foreignKey": "supplierFk"
}
}
}
}

View File

@ -1,10 +1,6 @@
{
"name": "SupplierContact",
"base": "Loggable",
"log": {
"model":"SupplierLog",
"relation": "supplier"
},
"options": {
"mysql": {
"table": "supplierContact"
@ -50,4 +46,4 @@
"permission": "ALLOW"
}
]
}
}

View File

@ -1,9 +1,6 @@
{
"name": "Supplier",
"base": "Loggable",
"log": {
"model":"SupplierLog"
},
"options": {
"mysql": {
"table": "supplier"

View File

@ -22,7 +22,7 @@
value-field="id"
rule>
</vn-autocomplete>
<vn-input-number
<vn-input-number
type="number"
label="Minimum M3"
ng-model="$ctrl.supplierAgencyTerm.minimumM3"
@ -31,19 +31,20 @@
</vn-input-number>
</vn-horizontal>
<vn-horizontal>
<vn-input-number
<vn-input-number
type="number"
label="Package Price"
ng-model="$ctrl.supplierAgencyTerm.packagePrice"
rule>
</vn-input-number>
<vn-input-number
<vn-input-number
type="number"
label="Km Price"
ng-model="$ctrl.supplierAgencyTerm.kmPrice"
step="0.01"
rule>
</vn-input-number>
<vn-input-number
<vn-input-number
type="number"
label="M3 Price"
ng-model="$ctrl.supplierAgencyTerm.m3Price"
@ -52,13 +53,13 @@
</vn-input-number>
</vn-horizontal>
<vn-horizontal>
<vn-input-number
<vn-input-number
type="number"
label="Route Price"
ng-model="$ctrl.supplierAgencyTerm.routePrice"
rule>
</vn-input-number>
<vn-input-number
<vn-input-number
type="number"
label="Minimum Km"
ng-model="$ctrl.supplierAgencyTerm.minimumKm"
@ -73,4 +74,4 @@
ui-sref="supplier.card.agencyTerm.index">
</vn-button>
</vn-button-bar>
</form>
</form>

View File

@ -1,6 +1,21 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('ticket deleteExpeditions()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
it('should delete the selected expeditions', async() => {
const tx = await models.Expedition.beginTransaction({});

View File

@ -1,6 +1,21 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('ticket moveExpeditions()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
it('should move the selected expeditions to new ticket', async() => {
const tx = await models.Expedition.beginTransaction({});
const ctx = {

View File

@ -1,6 +1,20 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('ticket-request confirm()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
let ctx = {
req: {
accessToken: {userId: 9},

View File

@ -1,6 +1,21 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('ticket-request deny()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
it('should return the denied ticket request', async() => {
const tx = await models.TicketRequest.beginTransaction({});

View File

@ -46,7 +46,7 @@ module.exports = async function(Self, tickets, reqArgs = {}) {
const fileName = `${year}${invoiceOut.ref}.pdf`;
// Store invoice
storage.write(stream, {
await storage.write(stream, {
type: 'invoice',
path: `${year}/${month}/${day}`,
fileName: fileName

View File

@ -34,6 +34,8 @@ module.exports = Self => {
const models = Self.app.models;
const myOptions = {};
let tx;
let dms;
let gestDocCreated = false;
if (typeof options == 'object')
Object.assign(myOptions, options);
@ -96,11 +98,12 @@ module.exports = Self => {
warehouseId: ticket.warehouseFk,
companyId: ticket.companyFk,
dmsTypeId: dmsType.id,
reference: id,
description: `Ticket ${id} Cliente ${ticket.client().name} Ruta ${ticket.route().id}`,
reference: '',
description: `Firma del cliente - Ruta ${ticket.route().id}`,
hasFile: true
};
await models.Ticket.uploadFile(ctxUploadFile, id, myOptions);
dms = await models.Dms.uploadFile(ctxUploadFile, myOptions);
gestDocCreated = true;
}
try {
@ -118,12 +121,16 @@ module.exports = Self => {
throw new UserError('This ticket cannot be signed because it has not been boxed');
else if (!await gestDocExists(args.tickets[i])) {
if (args.location) setLocation(args.tickets[i]);
await createGestDoc(args.tickets[i]);
if (!gestDocCreated) await createGestDoc(args.tickets[i]);
await models.TicketDms.create({ticketFk: args.tickets[i], dmsFk: dms[0].id}, myOptions);
const ticket = await models.Ticket.findById(args.tickets[i], null, myOptions);
await ticket.updateAttribute('isSigned', true, myOptions);
await Self.rawSql(`CALL vn.ticket_setState(?, ?)`, [args.tickets[i], 'DELIVERED'], myOptions);
}
}
if (tx) await tx.commit();
return;
} catch (e) {
if (tx) await tx.rollback();
throw e;

View File

@ -17,6 +17,17 @@ describe('ticket componentUpdate()', () => {
let componentValue;
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
const deliveryComponenet = await models.Component.findOne({where: {code: 'delivery'}});
deliveryComponentId = deliveryComponenet.id;
componentOfSaleSeven = `SELECT value
@ -180,9 +191,6 @@ describe('ticket componentUpdate()', () => {
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: ctx.req
});
const oldTicket = await models.Ticket.findById(ticketID, null, options);
await models.Ticket.componentUpdate(ctx, options);

View File

@ -1,10 +1,6 @@
{
"name": "Expedition",
"base": "Loggable",
"log": {
"model": "TicketLog",
"relation": "ticket"
},
"options": {
"mysql": {
"table": "expedition"
@ -59,4 +55,3 @@
}
}
}

View File

@ -1,12 +1,6 @@
{
"name": "Sale",
"base": "Loggable",
"log": {
"model": "TicketLog",
"relation": "ticket",
"showField": "concept",
"grabUser": true
},
"options": {
"mysql": {
"table": "sale"

View File

@ -1,6 +1,20 @@
const app = require('vn-loopback/server/server');
const LoopBackContext = require('loopback-context');
describe('ticket model TicketTracking', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
let ticketTrackingId;
afterAll(async() => {

View File

@ -14,6 +14,15 @@
},
"scopeDays": {
"type": "number"
},
"pickingDelay": {
"type": "number"
},
"packagingInvoicingDated": {
"type": "date"
},
"daysForWarningClaim": {
"type": "number"
}
}
}

View File

@ -1,10 +1,6 @@
{
"name": "TicketDms",
"base": "Loggable",
"log": {
"model": "TicketLog",
"relation": "ticket"
},
"options": {
"mysql": {
"table": "ticketDms"
@ -29,4 +25,4 @@
"foreignKey": "dmsFk"
}
}
}
}

View File

@ -1,10 +1,6 @@
{
"name": "TicketObservation",
"base": "Loggable",
"log": {
"model": "TicketLog",
"relation": "ticket"
},
"options": {
"mysql": {
"table": "ticketObservation"

View File

@ -1,10 +1,6 @@
{
"name": "TicketPackaging",
"base": "Loggable",
"log": {
"model": "TicketLog",
"relation": "ticket"
},
"options": {
"mysql": {
"table": "ticketPackaging"

View File

@ -6,10 +6,6 @@
"table": "ticketRefund"
}
},
"log": {
"model": "TicketLog",
"relation": "originalTicket"
},
"properties": {
"id": {
"id": true,

View File

@ -1,10 +1,6 @@
{
"name": "TicketRequest",
"base": "Loggable",
"log": {
"model": "TicketLog",
"relation": "ticket"
},
"options": {
"mysql": {
"table": "ticketRequest"
@ -64,4 +60,4 @@
"foreignKey": "itemFk"
}
}
}
}

View File

@ -1,11 +1,6 @@
{
"name": "TicketService",
"base": "Loggable",
"log": {
"model": "TicketLog",
"relation": "ticket",
"showField": "description"
},
"options": {
"mysql": {
"table": "ticketService"
@ -59,4 +54,4 @@
"foreignKey": "ticketServiceTypeFk"
}
}
}
}

View File

@ -1,16 +1,11 @@
{
"name": "TicketTracking",
"base": "Loggable",
"log": {
"model": "TicketLog",
"relation": "ticket",
"showField": "stateFk"
},
"options": {
"mysql": {
"table": "ticketTracking"
}
},
"options": {
"mysql": {
"table": "ticketTracking"
}
},
"properties": {
"id": {
"id": true,
@ -48,4 +43,3 @@
}
}
}

View File

@ -1,11 +1,6 @@
{
"name": "TicketWeekly",
"base": "Loggable",
"log": {
"model": "TicketLog",
"relation": "ticket",
"showField": "ticketFk"
},
"options": {
"mysql": {
"table": "ticketWeekly"
@ -32,4 +27,4 @@
"foreignKey": "agencyModeFk"
}
}
}
}

View File

@ -1,11 +1,6 @@
{
"name": "Ticket",
"base": "Loggable",
"log": {
"model":"TicketLog",
"showField": "id",
"grabUser": true
},
"options": {
"mysql": {
"table": "ticket"

View File

@ -481,6 +481,13 @@
on-accept="$ctrl.transferSales($ctrl.transfer.ticketId)">
</vn-confirm>
<vn-confirm
vn-id="claimConfirm"
question="Do you want to continue?"
message="Claim out of time"
on-accept="$ctrl.onCreateClaimAccepted()">
</vn-confirm>
<vn-menu vn-id="moreOptions">
<vn-item translate
name="sms"
@ -503,6 +510,7 @@
ng-click="$ctrl.createClaim()"
ng-if="$ctrl.isClaimable">
Add claim
</vn-item>
<vn-item translate
name="reserve"

View File

@ -7,6 +7,7 @@ class Controller extends Section {
super($element, $);
this._sales = [];
this.manaCode = 'mana';
this.getConfig();
}
get manaCode() {
@ -43,6 +44,15 @@ class Controller extends Section {
return ticketState && ticketState.state.code;
}
getConfig() {
let filter = {
fields: ['daysForWarningClaim'],
};
this.$http.get(`TicketConfigs`, {filter})
.then(res => {
this.ticketConfig = res.data;
});
}
get isClaimable() {
if (this.ticket) {
@ -184,6 +194,16 @@ class Controller extends Section {
}
createClaim() {
const timeDifference = new Date().getTime() - new Date(this.ticket.shipped).getTime();
const pastDays = Math.floor(timeDifference / 86400000);
if (pastDays >= this.ticketConfig[0].daysForWarningClaim)
this.$.claimConfirm.show();
else
this.onCreateClaimAccepted();
}
onCreateClaimAccepted() {
const sales = this.selectedValidSales();
const params = {ticketId: this.ticket.id, sales: sales};
this.resetChanges();
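
createClaim() above converts the time since the ticket shipped into whole days (86400000 ms per day) and only asks for confirmation once the configured daysForWarningClaim threshold is reached. A minimal sketch with illustrative values: a ticket shipped three days ago with a threshold of 2 would trigger the dialog.

const MS_PER_DAY = 86400000; // 1000 * 60 * 60 * 24

// Returns true when the claim is late enough to need an explicit confirmation.
function claimNeedsWarning(shipped, daysForWarningClaim, now = new Date()) {
    const pastDays = Math.floor((now.getTime() - new Date(shipped).getTime()) / MS_PER_DAY);
    return pastDays >= daysForWarningClaim;
}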

View File

@ -45,6 +45,7 @@ describe('Ticket', () => {
$scope.model = crudModel;
$scope.editDiscount = {relocate: () => {}, hide: () => {}};
$scope.editPricePopover = {relocate: () => {}};
$scope.claimConfirm = {show: () => {}};
$httpBackend = _$httpBackend_;
Object.defineProperties($state.params, {
id: {
@ -61,6 +62,10 @@ describe('Ticket', () => {
controller.card = {reload: () => {}};
controller._ticket = ticket;
controller._sales = sales;
controller.ticketConfig = [
{daysForWarningClaim: 1}
];
$httpBackend.expect('GET', 'TicketConfigs').respond(200);
}));
describe('ticket() setter', () => {
@ -113,7 +118,6 @@ describe('Ticket', () => {
it('should make an HTTP GET query and return the worker mana', () => {
controller.edit = {};
const expectedAmount = 250;
$httpBackend.expect('GET', 'Tickets/1/getSalesPersonMana').respond(200, expectedAmount);
$httpBackend.expect('GET', 'Sales/usesMana').respond(200);
$httpBackend.expect('GET', 'WorkerManas/getCurrentWorkerMana').respond(200, expectedAmount);
@ -279,7 +283,17 @@ describe('Ticket', () => {
});
describe('createClaim()', () => {
it('should perform a query and call windows open', () => {
it('should call to the claimConfirm show() method', () => {
jest.spyOn(controller.$.claimConfirm, 'show').mockReturnThis();
controller.createClaim();
expect(controller.$.claimConfirm.show).toHaveBeenCalledWith();
});
});
describe('onCreateClaimAccepted()', () => {
it('should perform a query and call window open', () => {
jest.spyOn(controller, 'resetChanges').mockReturnThis();
jest.spyOn(controller.$state, 'go').mockReturnThis();
@ -290,7 +304,7 @@ describe('Ticket', () => {
const expectedParams = {ticketId: 1, sales: [firstSale]};
$httpBackend.expect('POST', `Claims/createFromSales`, expectedParams).respond(200, {id: 1});
controller.createClaim();
controller.onCreateClaimAccepted();
$httpBackend.flush();
expect(controller.resetChanges).toHaveBeenCalledWith();

View File

@ -40,4 +40,5 @@ Refund: Abono
Promotion mana: Maná promoción
Claim mana: Maná reclamación
History: Historial
Select lines to see the options: Seleccione lineas para ver las opciones
Do you want to continue?: ¿Desea continuar?
Claim out of time: Reclamación fuera de plazo

View File

@ -1,6 +1,20 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('Travel createThermograph()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
const travelId = 3;
const currentUserId = 1102;
const thermographId = '138350-0';

View File

@ -1,14 +1,9 @@
{
"name": "TravelThermograph",
"base": "Loggable",
"log": {
"model":"TravelLog",
"relation": "travel",
"showField": "ref"
},
"options": {
"mysql": {
"table": "travelThermograph"
"table": "travelThermograph"
}
},
"properties": {

View File

@ -1,11 +1,6 @@
{
"name": "Travel",
"base": "Loggable",
"log": {
"model":"TravelLog",
"showField": "ref",
"grabUser": true
},
"options": {
"mysql": {
"table": "travel"

View File

@ -1,11 +1,6 @@
{
"name": "WorkerDms",
"base": "Loggable",
"log": {
"model":"ClientLog",
"relation": "worker",
"showField": "dmsFk"
},
"options": {
"mysql": {
"table": "workerDocument"

View File

@ -2,10 +2,6 @@
"name": "Worker",
"description": "Company employees",
"base": "Loggable",
"log": {
"model":"WorkerLog",
"showField": "firstName"
},
"options": {
"mysql": {
"table": "worker"

View File

@ -1,6 +1,21 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('agency clone()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
it('should clone a zone', async() => {
const tx = await models.Zone.beginTransaction({});

View File

@ -1,6 +1,20 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('zone exclusionGeo()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
const zoneId = 1;
const today = Date.vnNew();

View File

@ -1,6 +1,21 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('zone toggleIsIncluded()', () => {
beforeAll(async() => {
const activeCtx = {
accessToken: {userId: 9},
http: {
req: {
headers: {origin: 'http://localhost'}
}
}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});
});
it('should return the created location with isIncluded true', async() => {
const tx = await models.Zone.beginTransaction({});

View File

@ -1,10 +1,6 @@
{
"name": "ZoneEvent",
"base": "Loggable",
"log": {
"model":"ZoneLog",
"relation": "zone"
},
"options": {
"mysql": {
"table": "zoneEvent"
@ -57,4 +53,4 @@
"foreignKey": "zoneFk"
}
}
}
}

View File

@ -1,10 +1,6 @@
{
"name": "ZoneExclusion",
"base": "Loggable",
"log": {
"model":"ZoneLog",
"relation": "zone"
},
"options": {
"mysql": {
"table": "zoneExclusion"
@ -27,4 +23,4 @@
"foreignKey": "zoneFk"
}
}
}
}

Some files were not shown because too many files have changed in this diff.