diff --git a/back/models/buyer.json b/back/models/buyer.json
index a1297eda3..a17d3b538 100644
--- a/back/models/buyer.json
+++ b/back/models/buyer.json
@@ -15,9 +15,6 @@
"nickname": {
"type": "string",
"required": true
- },
- "display": {
- "type": "boolean"
}
},
"acls": [
diff --git a/db/dump/fixtures.before.sql b/db/dump/fixtures.before.sql
index ffbc6a864..58a4dc9f1 100644
--- a/db/dump/fixtures.before.sql
+++ b/db/dump/fixtures.before.sql
@@ -179,12 +179,12 @@ INSERT INTO `vn`.`country`(`id`, `name`, `isUeeMember`, `code`, `currencyFk`, `i
(30,'Canarias', 1, 'IC', 1, 24, 4, 1, 2);
INSERT INTO `vn`.`warehouse`(`id`, `name`, `code`, `isComparative`, `isInventory`, `hasAvailable`, `isManaged`, `hasDms`, `hasComission`, `countryFk`, `hasProduction`, `isOrigin`, `isDestiny`)
- VALUES (1, 'Warehouse One', 'ALG', 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
+ VALUES
+ (1, 'Warehouse One', 'ALG', 1, 1, 1, 1, 1, 1, 1, 1, 1, 1),
(2, 'Warehouse Two', NULL, 1, 1, 1, 1, 0, 1, 13, 1, 1, 0),
(3, 'Warehouse Three', NULL, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0),
(4, 'Warehouse Four', NULL, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1),
(5, 'Warehouse Five', NULL, 1, 1, 1, 1, 0, 0, 1, 1, 0, 0),
- (6, 'Warehouse six', 'VNH', 1, 1, 1, 1, 0, 0, 1, 1, 0, 0),
(13, 'Inventory', 'inv', 1, 1, 1, 0, 0, 0, 1, 0, 0, 0),
(60, 'Algemesi', NULL, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0);
@@ -1544,7 +1544,7 @@ INSERT INTO `bs`.`waste`(`buyerFk`, `year`, `week`, `itemFk`, `itemTypeFk`, `sal
('103', YEAR(DATE_ADD(util.VN_CURDATE(), INTERVAL -1 WEEK)), WEEK(DATE_ADD(util.VN_CURDATE(), INTERVAL -1 WEEK), 1), 6, 1, '186', '0', '51', '53.12', '56.20', '56.20', '56.20'),
('103', YEAR(DATE_ADD(util.VN_CURDATE(), INTERVAL -1 WEEK)), WEEK(DATE_ADD(util.VN_CURDATE(), INTERVAL -1 WEEK), 1), 7, 1, '277', '0', '53.12', '56.20', '56.20', '56.20', '56.20');
-INSERT INTO vn.buy(id,entryFk,itemFk,buyingValue,quantity,packagingFk,stickers,freightValue,packageValue,comissionValue,packing,grouping,groupingMode,location,price1,price2,price3,printedStickers,isChecked,isIgnored,weight,created)
+ INSERT INTO vn.buy(id,entryFk,itemFk,buyingValue,quantity,packagingFk,stickers,freightValue,packageValue,comissionValue,packing,grouping,groupingMode,location,price1,price2,price3,printedStickers,isChecked,isIgnored,weight,created)
VALUES
(1, 1, 1, 50, 5000, 4, 1, 1.500, 1.500, 0.000, 1, 1, 'packing', NULL, 0.00, 99.6, 99.4, 0, 1, 0, 1, util.VN_CURDATE() - INTERVAL 2 MONTH),
(2, 2, 1, 50, 100, 4, 1, 1.500, 1.500, 0.000, 1, 1, 'packing', NULL, 0.00, 99.6, 99.4, 0, 1, 0, 1, util.VN_CURDATE() - INTERVAL 1 MONTH),
@@ -1560,7 +1560,7 @@ INSERT INTO vn.buy(id,entryFk,itemFk,buyingValue,quantity,packagingFk,stickers,f
(12, 6, 4, 1.25, 0, 3, 1, 2.500, 2.500, 0.000, 10, 10, 'grouping', NULL, 0.00, 1.75, 1.67, 0, 1, 0, 4, util.VN_CURDATE()),
(13, 7, 1, 50, 0, 3, 1, 2.000, 2.000, 0.000, 1, 1, 'packing', NULL, 0.00, 99.6, 99.4, 0, 1, 0, 4, util.VN_CURDATE()),
(14, 7, 2, 5, 0, 3, 1, 2.000, 2.000, 0.000, 10, 10, 'grouping', NULL, 0.00, 7.30, 7.00, 0, 1, 0, 4, util.VN_CURDATE()),
- (15, 7, 4, 1.25, 0, 3, 1, 2.000, 2.000, 0.000, 10, 10, 'grouping', NULL, 0.00, 1.75, 1.67, 0, 1, 0, 4, util.VN_CURDATE()),
+ (15, 7, 4, 1.25, 0, 3, 1, 2.000, 2.000, 0.000, 10, 10, 'grouping', NULL, 0.00, 1.75, 1.67, 0, 1, 0, 4, util.VN_CURDATE()),
(16, 99,1,50.0000, 5000, 4, 1, 1.500, 1.500, 0.000, 1, 1, 'packing', NULL, 0.00, 99.60, 99.40, 0, 1, 0, 1.00, '2024-07-30 08:13:51.000');
INSERT INTO `hedera`.`order`(`id`, `date_send`, `customer_id`, `delivery_method_id`, `agency_id`, `address_id`, `company_id`, `note`, `source_app`, `confirmed`,`total`, `date_make`, `first_row_stamp`, `confirm_date`)
@@ -2442,30 +2442,32 @@ INSERT INTO `vn`.`workerTimeControl`(`userFk`, `timed`, `manual`, `direction`, `
(1107, CONCAT(util.VN_CURDATE(), ' 10:20'), TRUE, 'middle', 1),
(1107, CONCAT(util.VN_CURDATE(), ' 14:50'), TRUE, 'out', 1);
-INSERT INTO `vn`.`dmsType`(`id`, `name`, `readRoleFk`, `writeRoleFk`, `code`)
+INSERT INTO `vn`.`dmsType`
+ (`id`, `name`, `readRoleFk`, `writeRoleFk`, `code`)
VALUES
- (1, 'Facturas Recibidas', NULL, NULL, 'invoiceIn'),
- (2, 'Doc oficial', NULL, NULL, 'officialDoc'),
- (3, 'Laboral', 37, 37, 'hhrrData'),
- (4, 'Albaranes recibidos', NULL, NULL, 'deliveryNote'),
- (5, 'Otros', 1, 1, 'miscellaneous'),
- (6, 'Pruebas', NULL, NULL, 'tests'),
- (7, 'IAE Clientes', 1, 1, 'economicActivitiesTax'),
- (8, 'Fiscal', NULL, NULL, 'fiscal'),
- (9, 'Vehiculos', NULL, NULL, 'vehicles'),
- (10, 'Plantillas', NULL, NULL, 'templates'),
- (11, 'Contratos', NULL, NULL, 'contracts'),
- (12, 'ley de pagos', 1, 1, 'paymentsLaw'),
- (13, 'Basura', 1, 1, 'trash'),
- (14, 'Ticket', 1, 1, 'ticket'),
- (15, 'Presupuestos', NULL, NULL, 'budgets'),
- (16, 'Logistica', NULL, NULL, 'logistics'),
- (17, 'cmr', 1, 1, 'cmr'),
- (18, 'dua', NULL, NULL, 'dua'),
- (19, 'inmovilizado', NULL, NULL, 'fixedAssets'),
- (20, 'Reclamación', 1, 1, 'claim'),
- (21, 'Entrada', 1, 1, 'entry'),
- (22, 'Proveedor', 1, 1, 'supplier');
+ (1, 'Facturas Recibidas', NULL, NULL, 'invoiceIn'),
+ (2, 'Doc oficial', NULL, NULL, 'officialDoc'),
+ (3, 'Laboral', 37, 37, 'hhrrData'),
+ (4, 'Albaranes recibidos', NULL, NULL, 'deliveryNote'),
+ (5, 'Otros', 1, 1, 'miscellaneous'),
+ (6, 'Pruebas', NULL, NULL, 'tests'),
+ (7, 'IAE Clientes', 1, 1, 'economicActivitiesTax'),
+ (8, 'Fiscal', NULL, NULL, 'fiscal'),
+ (9, 'Vehiculos', NULL, NULL, 'vehicles'),
+ (10, 'Plantillas', NULL, NULL, 'templates'),
+ (11, 'Contratos', NULL, NULL, 'contracts'),
+ (12, 'ley de pagos', 1, 1, 'paymentsLaw'),
+ (13, 'Basura', 1, 1, 'trash'),
+ (14, 'Ticket', 1, 1, 'ticket'),
+ (15, 'Presupuestos', NULL, NULL, 'budgets'),
+ (16, 'Logistica', NULL, NULL, 'logistics'),
+ (17, 'cmr', 1, 1, 'cmr'),
+ (18, 'dua', NULL, NULL, 'dua'),
+ (19, 'inmovilizado', NULL, NULL, 'fixedAssets'),
+ (20, 'Reclamación', 1, 1, 'claim'),
+ (21, 'Entrada', 1, 1, 'entry'),
+ (22, 'Proveedor', 1, 1, 'supplier'),
+ (23, 'Termografos', 35, 35, 'thermograph');
INSERT INTO `vn`.`dms`(`id`, `dmsTypeFk`, `file`, `contentType`, `workerFk`, `warehouseFk`, `companyFk`, `hardCopyNumber`, `hasFile`, `reference`, `description`, `created`)
VALUES
@@ -2473,7 +2475,7 @@ INSERT INTO `vn`.`dms`(`id`, `dmsTypeFk`, `file`, `contentType`, `workerFk`, `wa
(2, 5, '2.txt', 'text/plain', 5, 1, 442, 1, TRUE, 'Client:104', 'Client:104 dms for the client', util.VN_CURDATE()),
(3, 5, '3.txt', 'text/plain', 5, 1, 442, NULL, TRUE, 'Client: 104', 'Client:104 readme', util.VN_CURDATE()),
(4, 3, '4.txt', 'text/plain', 5, 1, 442, NULL, TRUE, 'Worker: 106', 'Worker:106 readme', util.VN_CURDATE()),
- (5, 5, '5.txt', 'text/plain', 5, 1, 442, NULL, TRUE, 'travel: 1', 'dmsForThermograph', util.VN_CURDATE()),
+ (5, 23, '5.txt', 'text/plain', 5, 1, 442, NULL, TRUE, 'travel: 1', 'dmsForThermograph', util.VN_CURDATE()),
(6, 5, '6.txt', 'text/plain', 5, 1, 442, NULL, TRUE, 'NotExists', 'DoesNotExists', util.VN_CURDATE()),
(7, 20, '7.jpg', 'image/jpeg', 9, 1, 442, NULL, FALSE, '1', 'TICKET ID DEL CLIENTE BRUCE WAYNE ID 1101', util.VN_CURDATE()),
(8, 20, '8.mp4', 'video/mp4', 9, 1, 442, NULL, FALSE, '1', 'TICKET ID DEL CLIENTE BRUCE WAYNE ID 1101', util.VN_CURDATE()),
@@ -3188,7 +3190,7 @@ UPDATE vn.department
SET workerFk = null;
INSERT INTO vn.packaging
- VALUES('--', 2745600.00, 100.00, 120.00, 220.00, 0.00, 1, '2001-01-01 00:00:00.000', NULL, NULL, NULL, 0.00, 16, 0.00, 0, NULL, 0.00, NULL, NULL, 0, NULL, 0, 0,0);
+ VALUES('--', 2745600.00, 100.00, 120.00, 220.00, 0.00, 1, '2001-01-01 00:00:00.000', NULL, NULL, NULL, 0.00, 16, 0.00, 0, NULL, 0.00, NULL, NULL, 0, NULL, 0, 0,0,1);
INSERT IGNORE INTO vn.intrastat
@@ -3939,37 +3941,43 @@ INSERT INTO vn.medicalReview
(id, workerFk, centerFk, `date`, `time`, isFit, amount, invoice, remark)
VALUES(3, 9, 2, '2000-01-01', '8:00', 1, 150.0, NULL, NULL);
-INSERT INTO vn.stockBought (workerFk, bought, reserve, dated)
- VALUES(35, 1.00, 1.00, '2001-01-01');
-
-INSERT INTO vn.auctionConfig (id,conversionCoefficient,warehouseFk)
- VALUES (1,0.6,6);
-
-INSERT INTO vn.payrollComponent (id, name, isSalaryAgreed, isVariable, isException)
- VALUES (1, 'Salario1', 1, 0, 0),
+INSERT INTO vn.payrollComponent
+(id, name, isSalaryAgreed, isVariable, isException)
+ VALUES
+ (1, 'Salario1', 1, 0, 0),
(2, 'Salario2', 1, 1, 0),
(3, 'Salario3', 1, 0, 1);
-INSERT INTO vn.workerIncome (debit, credit, incomeTypeFk, paymentDate, workerFk, concept)
- VALUES (1000.00, 900.00, 2, '2000-01-01', 1106, NULL),
+
+INSERT INTO vn.workerIncome
+(debit, credit, incomeTypeFk, paymentDate, workerFk, concept)
+ VALUES
+ (1000.00, 900.00, 2, '2000-01-01', 1106, NULL),
(1001.00, 800.00, 2, '2000-01-01', 1106, NULL);
-INSERT INTO dipole.printer (id, description) VALUES(1, '');
-INSERT INTO dipole.expedition_PrintOut (expeditionFk, ticketFk, addressFk, street, postalCode, city, shopName, isPrinted, created, printerFk, routeFk, parkingCode, truckName, clientFk, phone, province, agency, m3, workerCode, itemFk, quantity, longName, shelvingFk, comments)
- VALUES(1, 1, 0, ' ', ' ', ' ', ' ', 0, '2001-01-01 00:00:00', 1, 0, ' ', ' ', 0, NULL, '', NULL, 0.000, NULL, 10, NULL, NULL, 'NCC', NULL);
+INSERT INTO dipole.printer (id, description)
+VALUES(1, '');
-INSERT INTO vn.accountDetail (id, value, accountDetailTypeFk, supplierAccountFk)
- VALUES (21, 'ES12345B12345678', 3, 241),
- (35, 'ES12346B12345679', 3, 241);
+INSERT INTO dipole.expedition_PrintOut (expeditionFk, ticketFk, addressFk, street, postalCode, city, shopName, isPrinted, created, printerFk, routeFk, parkingCode,
+truckName, clientFk, phone, province, agency, m3, workerCode, itemFk, quantity, longName, shelvingFk, comments)
+VALUES(1, 1, 0, ' ', ' ', ' ', ' ', 0, '2001-01-01 00:00:00', 1, 0, ' ', ' ', 0, NULL, '', NULL, 0.000, NULL, 10, NULL, NULL, 'NCC', NULL);
-INSERT INTO vn.accountDetailType (id, description, code)
- VALUES (1, 'IBAN', 'iban'),
- (2, 'SWIFT', 'swift'),
- (3, 'Referencia Remesas', 'remRef'),
- (4, 'Referencia Transferencias', 'trnRef'),
- (5, 'Referencia Nominas', 'payRef'),
- (6, 'ABA', 'aba');
+INSERT INTO vn.accountDetail
+(id, value, accountDetailTypeFk, supplierAccountFk)
+VALUES
+ (21, 'ES12345B12345678', 3, 241),
+ (35, 'ES12346B12345679', 3, 241);
+
+INSERT INTO vn.accountDetailType
+(id, description, code)
+VALUES
+ (1, 'IBAN', 'iban'),
+ (2, 'SWIFT', 'swift'),
+ (3, 'Referencia Remesas', 'remRef'),
+ (4, 'Referencia Transferencias', 'trnRef'),
+ (5, 'Referencia Nominas', 'payRef'),
+ (6, 'ABA', 'aba');
INSERT IGNORE INTO ormConfig
SET id =1,
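
The fixture change above gives the thermograph document its own dmsType (23, 'thermograph') and repoints dms id 5 at it. A quick sanity check against these fixtures (ids taken from the rows above):

    SELECT d.id, dt.code
        FROM vn.dms d
        JOIN vn.dmsType dt ON dt.id = d.dmsTypeFk
        WHERE d.id = 5; -- expected code: 'thermograph'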
diff --git a/db/routines/hedera/procedures/orderRow_updateOverstocking.sql b/db/routines/hedera/procedures/orderRow_updateOverstocking.sql
new file mode 100644
index 000000000..ff8362c65
--- /dev/null
+++ b/db/routines/hedera/procedures/orderRow_updateOverstocking.sql
@@ -0,0 +1,52 @@
+DELIMITER $$
+CREATE OR REPLACE DEFINER=`vn`@`localhost`
+PROCEDURE `hedera`.`orderRow_updateOverstocking`(vOrderFk INT)
+BEGIN
+/**
+* Set amount = 0 to avoid overbooking sales
+*
+* @param vOrderFk hedera.order.id
+*/
+ DECLARE vCalcFk INT;
+ DECLARE vDone BOOL;
+ DECLARE vWarehouseFk INT;
+
+ DECLARE cWarehouses CURSOR FOR
+ SELECT DISTINCT warehouseFk
+ FROM orderRow
+ WHERE orderFk = vOrderFk
+ AND shipped = util.VN_CURDATE();
+
+ DECLARE CONTINUE HANDLER FOR NOT FOUND SET vDone = TRUE;
+
+ DECLARE EXIT HANDLER FOR SQLEXCEPTION
+ BEGIN
+ ROLLBACK;
+ RESIGNAL;
+ END;
+
+ OPEN cWarehouses;
+ checking: LOOP
+ SET vDone = FALSE;
+
+ FETCH cWarehouses INTO vWarehouseFk;
+
+ IF vDone THEN
+ LEAVE checking;
+ END IF;
+
+ CALL cache.available_refresh(vCalcFk, FALSE, vWarehouseFk, util.VN_CURDATE());
+
+ UPDATE orderRow r
+ JOIN `order` o ON o.id = r.orderFk
+ JOIN orderConfig oc
+ JOIN cache.available a ON a.calc_id = vCalcFk AND a.item_id = r.itemFk
+ SET r.amount = 0
+ WHERE ADDTIME(o.rowUpdated, oc.reserveTime) < util.VN_NOW()
+ AND a.available <= 0
+ AND r.warehouseFk = vWarehouseFk
+ AND r.orderFk = vOrderFk;
+ END LOOP;
+ CLOSE cWarehouses;
+END$$
+DELIMITER ;
\ No newline at end of file
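
A minimal usage sketch for the new procedure, with a hypothetical order id; it zeroes the amount of today's rows whose reserve time has expired and whose item has no availability left:

    CALL hedera.orderRow_updateOverstocking(42); -- 42 is a made-up order id
    SELECT id, itemFk, amount
        FROM hedera.orderRow
        WHERE orderFk = 42
            AND amount = 0; -- rows released by the procedure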
diff --git a/db/routines/hedera/procedures/order_confirmWithUser.sql b/db/routines/hedera/procedures/order_confirmWithUser.sql
index 2b033b704..b3aab522e 100644
--- a/db/routines/hedera/procedures/order_confirmWithUser.sql
+++ b/db/routines/hedera/procedures/order_confirmWithUser.sql
@@ -12,6 +12,7 @@ BEGIN
* @param vUser The user identifier
*/
DECLARE vHasRows BOOL;
+ DECLARE vHas0Amount BOOL;
DECLARE vDone BOOL;
DECLARE vWarehouseFk INT;
DECLARE vShipment DATE;
@@ -101,6 +102,8 @@ BEGIN
CALL order_checkEditable(vSelf);
+ CALL orderRow_updateOverstocking(vSelf);
+
-- Check order is not empty
SELECT COUNT(*) > 0 INTO vHasRows
FROM orderRow
@@ -111,6 +114,18 @@ BEGIN
CALL util.throw('ORDER_EMPTY');
END IF;
+ -- Check if any product has a quantity of 0
+ SELECT EXISTS (
+ SELECT id
+ FROM orderRow
+ WHERE orderFk = vSelf
+ AND amount = 0
+ ) INTO vHas0Amount;
+
+ IF vHas0Amount THEN
+ CALL util.throw('Remove lines with quantity = 0 before confirming');
+ END IF;
+
-- Crea los tickets del pedido
OPEN vDates;
lDates: LOOP
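
Sketch of the new guard in action, assuming a hypothetical order 42 that still has a zero-amount line after the overstocking pass, confirmed by a hypothetical user 9:

    CALL hedera.order_confirmWithUser(42, 9);
    -- expected to throw: 'Remove lines with quantity = 0 before confirming'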
diff --git a/db/routines/hedera/triggers/orderRow_afterInsert.sql b/db/routines/hedera/triggers/orderRow_afterInsert.sql
new file mode 100644
index 000000000..af1a1479f
--- /dev/null
+++ b/db/routines/hedera/triggers/orderRow_afterInsert.sql
@@ -0,0 +1,10 @@
+DELIMITER $$
+CREATE OR REPLACE DEFINER=`root`@`localhost` TRIGGER `hedera`.`orderRow_afterInsert`
+ AFTER INSERT ON `orderRow`
+ FOR EACH ROW
+BEGIN
+ UPDATE `order`
+ SET rowUpdated = NOW()
+ WHERE id = NEW.orderFk;
+END$$
+DELIMITER ;
\ No newline at end of file
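
Rough illustration of the trigger's effect, with made-up ids and a column list trimmed to the ones this diff touches (the real table may require more columns): inserting a row refreshes the parent order's rowUpdated, which order_confirmWithUser later compares against orderConfig.reserveTime.

    INSERT INTO hedera.orderRow (orderFk, itemFk, warehouseFk, shipped, amount)
        VALUES (42, 1, 1, util.VN_CURDATE(), 5);
    SELECT rowUpdated FROM hedera.`order` WHERE id = 42; -- bumped to NOW() by the trigger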
diff --git a/db/routines/vn/events/travel_setDelivered.sql b/db/routines/vn/events/travel_setDelivered.sql
index 769ee9d24..396f3e144 100644
--- a/db/routines/vn/events/travel_setDelivered.sql
+++ b/db/routines/vn/events/travel_setDelivered.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` EVENT `vn`.`travel_setDelivered`
+CREATE OR REPLACE DEFINER=`vn`@`localhost` EVENT `vn`.`travel_setDelivered`
ON SCHEDULE EVERY 1 DAY
STARTS '2024-07-12 00:10:00.000'
ON COMPLETION PRESERVE
diff --git a/db/routines/vn/procedures/buy_getUltimate.sql b/db/routines/vn/procedures/buy_getUltimate.sql
index 023e81774..1532222ad 100644
--- a/db/routines/vn/procedures/buy_getUltimate.sql
+++ b/db/routines/vn/procedures/buy_getUltimate.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`buy_getUltimate`(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`buy_getUltimate`(
vItemFk INT,
vWarehouseFk SMALLINT,
vDated DATE
diff --git a/db/routines/vn/procedures/buy_getUltimateFromInterval.sql b/db/routines/vn/procedures/buy_getUltimateFromInterval.sql
index 2115beb95..24a843eb0 100644
--- a/db/routines/vn/procedures/buy_getUltimateFromInterval.sql
+++ b/db/routines/vn/procedures/buy_getUltimateFromInterval.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`buy_getUltimateFromInterval`(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`buy_getUltimateFromInterval`(
vItemFk INT,
vWarehouseFk SMALLINT,
vStarted DATE,
diff --git a/db/routines/vn/procedures/catalog_componentCalculate.sql b/db/routines/vn/procedures/catalog_componentCalculate.sql
index d4ce88ca7..e29e13a8c 100644
--- a/db/routines/vn/procedures/catalog_componentCalculate.sql
+++ b/db/routines/vn/procedures/catalog_componentCalculate.sql
@@ -29,21 +29,24 @@ BEGIN
(INDEX (itemFk))
ENGINE = MEMORY
SELECT i.id itemFk,
- SUM(IFNULL(pd.absIncreasing,0)) absIncreasing,
- SUM(IFNULL(pd.ratIncreasing,0)) ratIncreasing,
- pd.warehouseFk
- FROM item i
- JOIN priceDelta pd
- ON pd.itemTypeFk = i.typeFk
- AND (pd.minSize IS NULL OR pd.minSize <= i.`size`)
- AND (pd.maxSize IS NULL OR pd.maxSize >= i.`size`)
- AND (pd.inkFk IS NULL OR pd.inkFk = i.inkFk)
- AND (pd.originFk IS NULL OR pd.originFk = i.originFk)
- AND (pd.producerFk IS NULL OR pd.producerFk = i.producerFk)
- AND (pd.warehouseFk IS NULL OR pd.warehouseFk = vWarehouseFk)
- WHERE (pd.fromDated IS NULL OR pd.fromDated <= vShipped)
- AND (pd.toDated IS NULL OR pd.toDated >= vShipped)
- GROUP BY i.id;
+ SUM(IFNULL(pd.absIncreasing,0)) absIncreasing,
+ SUM(IFNULL(pd.ratIncreasing,0)) ratIncreasing,
+ pd.warehouseFk
+ FROM item i
+ JOIN priceDelta pd
+ ON pd.itemTypeFk = i.typeFk
+ AND (pd.minSize IS NULL OR pd.minSize <= i.`size`)
+ AND (pd.maxSize IS NULL OR pd.maxSize >= i.`size`)
+ AND (pd.inkFk IS NULL OR pd.inkFk = i.inkFk)
+ AND (pd.originFk IS NULL OR pd.originFk = i.originFk)
+ AND (pd.producerFk IS NULL OR pd.producerFk = i.producerFk)
+ AND (pd.warehouseFk IS NULL OR pd.warehouseFk = vWarehouseFk)
+ LEFT JOIN zoneGeo zg ON zg.id = pd.zoneGeoFk
+ LEFT JOIN zoneGeo zg2 ON zg2.id = address_getGeo(vAddressFk)
+ WHERE (pd.fromDated IS NULL OR pd.fromDated <= vShipped)
+ AND (pd.toDated IS NULL OR pd.toDated >= vShipped)
+ AND (pd.zoneGeoFk IS NULL OR zg2.lft BETWEEN zg.lft AND zg.rgt)
+ GROUP BY itemFk;
CREATE OR REPLACE TEMPORARY TABLE tSpecialPrice
(INDEX (itemFk))
@@ -130,15 +133,15 @@ BEGIN
-- Bonus del comprador a un rango de productos
INSERT INTO tmp.ticketComponent(warehouseFk, itemFk, componentFk, cost)
- SELECT
+ SELECT
tcb.warehouseFk,
tcb.itemFk,
c.id,
IFNULL(tcb.base * tpd.ratIncreasing / 100,0) + IFNULL(tpd.absIncreasing,0)
FROM tmp.ticketComponentBase tcb
JOIN component c ON c.code = 'bonus'
- JOIN tPriceDelta tpd
- ON tpd.itemFk = tcb.itemFk
+ JOIN tPriceDelta tpd
+ ON tpd.itemFk = tcb.itemFk
AND tpd.warehouseFk = tcb.warehouseFk;
-- RECOBRO
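
The new zoneGeo join is a nested-set containment test: a delta applies when the address's geo node falls inside the delta's zone subtree. The same predicate in isolation, with a made-up address id:

    SELECT pd.absIncreasing, pd.ratIncreasing
        FROM vn.priceDelta pd
        JOIN vn.zoneGeo zg ON zg.id = pd.zoneGeoFk
        JOIN vn.zoneGeo zg2 ON zg2.id = vn.address_getGeo(123) -- hypothetical address
        WHERE zg2.lft BETWEEN zg.lft AND zg.rgt;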
diff --git a/db/routines/vn/procedures/clean.sql b/db/routines/vn/procedures/clean.sql
index a8ca68e5f..4a1f526fc 100644
--- a/db/routines/vn/procedures/clean.sql
+++ b/db/routines/vn/procedures/clean.sql
@@ -59,7 +59,7 @@ BEGIN
DELETE b FROM buy b
JOIN entryConfig e ON e.defaultEntry = b.entryFk
WHERE b.created < v2Months;
- DELETE FROM stockBuyed WHERE creationDate < v2Months;
+ DELETE FROM stockBought WHERE dated < v2Months;
DELETE FROM printQueue WHERE statusCode = 'printed' AND created < v2Months;
-- Equipos duplicados
DELETE w.*
diff --git a/db/routines/vn/procedures/collectionPlacement_get.sql b/db/routines/vn/procedures/collectionPlacement_get.sql
index d81847375..239dbd3a2 100644
--- a/db/routines/vn/procedures/collectionPlacement_get.sql
+++ b/db/routines/vn/procedures/collectionPlacement_get.sql
@@ -55,24 +55,20 @@ BEGIN
SELECT ts.saleFk,
ts.itemFk,
CAST(0 AS DECIMAL(10,0)) saleOrder,
- IF(ish.visible > 0 OR iss.id, 1, 100000) *
- IFNULL(p2.pickingOrder, p.pickingOrder) `order`,
- TO_SECONDS(IF(iss.id,
- iss.created - INTERVAL vCurrentYear YEAR,
- ish.created - INTERVAL YEAR(ish.created) YEAR)) priority,
+ (IF(ish.visible > 0 OR iss.id, 1, 100000) *
+ COALESCE(p2.pickingOrder, p.pickingOrder)) `order`,
+ TO_SECONDS(COALESCE(iss.created, ish.created)) - TO_SECONDS(MAKEDATE(IFNULL(YEAR(iss.created), YEAR(ish.created)), 1)) priority,
CONCAT(
- IF(iss.id,
- CONCAT('< ', IFNULL(wk.`code`, '---'),' > '),
- ''),
- p.`code`) COLLATE utf8_general_ci placement,
+ IF(iss.id, CONCAT('< ', COALESCE(wk.`code`, '---'),' > '), ''),
+ p.`code`
+ ) COLLATE utf8_general_ci placement,
sh.priority shelvingPriority,
sh.code COLLATE utf8_general_ci shelving,
ish.created,
ish.visible,
- IFNULL(
- IF(st.code = 'previousByPacking', ish.packing, g.`grouping`),
- 1) `grouping`,
- st.code = 'previousPrepared' isPreviousPrepared,
+ COALESCE(
+ IF(st.code = 'previousByPacking', ish.packing, g.`grouping`),1) `grouping`,
+ (st.code = 'previousPrepared') isPreviousPrepared,
iss.id itemShelvingSaleFk,
ts.ticketFk,
iss.id,
@@ -80,11 +76,12 @@ BEGIN
iss.userFk,
ts.quantity
FROM tSale ts
- LEFT JOIN (SELECT DISTINCT saleFk
- FROM saleTracking st
- JOIN state s ON s.id = st.stateFk
- WHERE st.isChecked
- AND s.semaphore = 1) st ON st.saleFk = ts.saleFk
+ LEFT JOIN (SELECT st.saleFk
+ FROM saleTracking st
+ JOIN state s ON s.id = st.stateFk
+ WHERE st.isChecked
+ AND s.semaphore = 1
+ GROUP BY st.saleFk) st ON st.saleFk = ts.saleFk
JOIN itemShelving ish ON ish.itemFk = ts.itemFk
JOIN shelving sh ON sh.code = ish.shelvingFk
JOIN parking p ON p.id = sh.parkingFk
@@ -93,14 +90,14 @@ BEGIN
JOIN warehouse w ON w.id = sc.warehouseFk
LEFT JOIN tGrouping g ON g.itemFk = ts.itemFk
LEFT JOIN itemShelvingSale iss ON iss.saleFk = ts.saleFk
- AND iss.itemShelvingFk = ish.id
+ AND iss.itemShelvingFk = ish.id
LEFT JOIN worker wk ON wk.id = iss.userFk
LEFT JOIN saleGroupDetail sgd ON sgd.saleFk = ts.saleFk
LEFT JOIN saleGroup sg ON sg.id = sgd.saleGroupFk
LEFT JOIN parking p2 ON p2.id = sg.parkingFk
WHERE w.id = vWarehouseFk
- AND NOT sc.isHideForPickers
- HAVING (iss.id AND st.saleFk) OR salePreviousPrepared IS NULL;
+ AND NOT sc.isHideForPickers
+ AND ((iss.id AND st.saleFk) OR st.saleFk IS NULL);
CREATE OR REPLACE TEMPORARY TABLE tSalePlacementList2
(INDEX(saleFk), INDEX(olderPriority))
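
The rewritten priority is simply "seconds elapsed since January 1st of the creation year", taken from iss.created when it exists and ish.created otherwise. The expression checked in isolation:

    SELECT TO_SECONDS('2024-03-01 10:00:00')
         - TO_SECONDS(MAKEDATE(2024, 1)) secondsIntoYear; -- 5220000 (60 days + 10 h)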
diff --git a/db/routines/vn/procedures/collection_getTickets.sql b/db/routines/vn/procedures/collection_getTickets.sql
index 0f675041a..4566792fa 100644
--- a/db/routines/vn/procedures/collection_getTickets.sql
+++ b/db/routines/vn/procedures/collection_getTickets.sql
@@ -23,7 +23,7 @@ BEGIN
JOIN vn.ticketCollection tc ON tc.ticketFk = tob.ticketFk
LEFT JOIN vn.observationType ot ON ot.id = tob.observationTypeFk
WHERE ot.`code` = 'itemPicker'
- AND tc.collectionFk = vParamFk
+ AND (tc.collectionFk = vParamFk OR tc.ticketFk = vParamFk)
)
SELECT t.id ticketFk,
IF(!(vItemPackingTypeFk <=> 'V'), cc.code, CONCAT(SUBSTRING('ABCDEFGH', tc.wagon, 1), '-', tc.`level`)) `level`,
diff --git a/db/routines/vn/procedures/collection_mergeSales.sql b/db/routines/vn/procedures/collection_mergeSales.sql
index 26444d6f9..297bdb97e 100644
--- a/db/routines/vn/procedures/collection_mergeSales.sql
+++ b/db/routines/vn/procedures/collection_mergeSales.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`collection_mergeSales`(vCollectionFk INT)
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`collection_mergeSales`(vCollectionFk INT)
BEGIN
DECLARE vDone BOOL;
DECLARE vTicketFk INT;
diff --git a/db/routines/vn/procedures/collection_new.sql b/db/routines/vn/procedures/collection_new.sql
index f04d5241e..480a88f35 100644
--- a/db/routines/vn/procedures/collection_new.sql
+++ b/db/routines/vn/procedures/collection_new.sql
@@ -1,5 +1,8 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`collection_new`(vUserFk INT, OUT vCollectionFk INT)
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`collection_new`(
+ vUserFk INT,
+ OUT vCollectionFk INT
+)
BEGIN
/**
* Genera colecciones de tickets sin asignar trabajador.
@@ -12,30 +15,29 @@ BEGIN
DECLARE vLinesLimit INT;
DECLARE vTicketLines INT;
DECLARE vVolumeLimit DECIMAL;
- DECLARE vTicketVolume DECIMAL;
DECLARE vSizeLimit INT;
+ DECLARE vTicketVolume DECIMAL;
DECLARE vMaxTickets INT;
- DECLARE vStateFk VARCHAR(45);
+ DECLARE vStateCode VARCHAR(45);
DECLARE vFirstTicketFk INT;
- DECLARE vHour INT;
- DECLARE vMinute INT;
DECLARE vWorkerCode VARCHAR(3);
- DECLARE vWagonCounter INT DEFAULT 0;
+ DECLARE vWagonCounter INT DEFAULT 1;
DECLARE vTicketFk INT;
DECLARE vItemPackingTypeFk VARCHAR(1);
- DECLARE vHasAssignedTickets BOOLEAN;
+ DECLARE vHasAssignedTickets BOOL;
DECLARE vHasUniqueCollectionTime BOOL;
- DECLARE vDone INT DEFAULT FALSE;
- DECLARE vLockName VARCHAR(215);
- DECLARE vLockTime INT DEFAULT 30;
+ DECLARE vHeight INT;
+ DECLARE vVolume INT;
+ DECLARE vLiters INT;
+ DECLARE vLines INT;
+ DECLARE vTotalLines INT DEFAULT 0;
+ DECLARE vTotalVolume INT DEFAULT 0;
DECLARE vFreeWagonFk INT;
- DECLARE vErrorNumber INT;
- DECLARE vErrorMsg TEXT;
+ DECLARE vDone INT DEFAULT FALSE;
- DECLARE c1 CURSOR FOR
+ DECLARE vTickets CURSOR FOR
SELECT ticketFk, `lines`, m3
FROM tmp.productionBuffer
- WHERE ticketFk <> vFirstTicketFk
ORDER BY HH,
mm,
productionOrder DESC,
@@ -48,26 +50,6 @@ BEGIN
DECLARE CONTINUE HANDLER FOR NOT FOUND SET vDone = TRUE;
- DECLARE EXIT HANDLER FOR SQLEXCEPTION
- BEGIN
- GET DIAGNOSTICS CONDITION 1
- vErrorNumber = MYSQL_ERRNO,
- vErrorMsg = MESSAGE_TEXT;
-
- CALL util.debugAdd('collection_new', JSON_OBJECT(
- 'errorNumber', vErrorNumber,
- 'errorMsg', vErrorMsg,
- 'lockName', vLockName,
- 'userFk', vUserFk,
- 'ticketFk', vTicketFk
- )); -- Tmp
-
- IF vLockName IS NOT NULL THEN
- DO RELEASE_LOCK(vLockName);
- END IF;
- RESIGNAL;
- END;
-
SELECT pc.ticketTrolleyMax * o.numberOfWagons,
pc.hasUniqueCollectionTime,
w.code,
@@ -78,36 +60,26 @@ BEGIN
o.trainFk,
o.linesLimit,
o.volumeLimit,
- o.sizeLimit,
- pc.collection_new_lockname
+ o.sizeLimit
INTO vMaxTickets,
- vHasUniqueCollectionTime,
- vWorkerCode,
- vWarehouseFk,
- vItemPackingTypeFk,
- vStateFk,
- vWagons,
- vTrainFk,
- vLinesLimit,
- vVolumeLimit,
- vSizeLimit,
- vLockName
- FROM productionConfig pc
- JOIN worker w ON w.id = vUserFk
+ vHasUniqueCollectionTime,
+ vWorkerCode,
+ vWarehouseFk,
+ vItemPackingTypeFk,
+ vStateCode,
+ vWagons,
+ vTrainFk,
+ vLinesLimit,
+ vVolumeLimit,
+ vSizeLimit
+ FROM worker w
+ JOIN operator o ON o.workerFk = w.id
JOIN state st ON st.`code` = 'ON_PREPARATION'
- JOIN operator o ON o.workerFk = vUserFk;
-
- SET vLockName = CONCAT_WS('/',
- vLockName,
- vWarehouseFk,
- vItemPackingTypeFk
- );
-
- IF NOT GET_LOCK(vLockName, vLockTime) THEN
- CALL util.throw(CONCAT('Cannot get lock: ', vLockName));
- END IF;
+ JOIN productionConfig pc
+ WHERE w.id = vUserFk;
-- Se prepara el tren, con tantos vagones como sea necesario.
+
CREATE OR REPLACE TEMPORARY TABLE tTrain
(wagon INT,
shelve INT,
@@ -118,59 +90,60 @@ BEGIN
PRIMARY KEY(wagon, shelve))
ENGINE = MEMORY;
- WHILE vWagons > vWagonCounter DO
- SET vWagonCounter = vWagonCounter + 1;
-
- INSERT INTO tTrain(wagon, shelve, liters, `lines`, height)
- SELECT vWagonCounter, cv.`level` , cv.liters , cv.`lines` , cv.height
- FROM collectionVolumetry cv
- WHERE cv.trainFk = vTrainFk
+ INSERT INTO tTrain (wagon, shelve, liters, `lines`, height)
+ WITH RECURSIVE wagonSequence AS (
+ SELECT vWagonCounter wagon
+ UNION ALL
+ SELECT wagon + 1 wagon
+ FROM wagonSequence
+ WHERE wagon < vWagonCounter + vWagons -1
+ )
+ SELECT ws.wagon, cv.`level`, cv.liters, cv.`lines`, cv.height
+ FROM wagonSequence ws
+ JOIN vn.collectionVolumetry cv ON cv.trainFk = vTrainFk
AND cv.itemPackingTypeFk = vItemPackingTypeFk;
- END WHILE;
-- Esto desaparecerá cuando tengamos la table cache.ticket
+
CALL productionControl(vWarehouseFk, 0);
ALTER TABLE tmp.productionBuffer
ADD COLUMN liters INT,
ADD COLUMN height INT;
- -- Se obtiene nº de colección.
- INSERT INTO collection
- SET itemPackingTypeFk = vItemPackingTypeFk,
- trainFk = vTrainFk,
- wagons = vWagons,
- warehouseFk = vWarehouseFk;
-
- SELECT LAST_INSERT_ID() INTO vCollectionFk;
-
-- Los tickets de recogida en Algemesí sólo se sacan si están asignados.
-- Los pedidos con riesgo no se sacan aunque se asignen.
- DELETE pb.*
+
+ DELETE pb
FROM tmp.productionBuffer pb
JOIN state s ON s.id = pb.state
WHERE (pb.agency = 'REC_ALGEMESI'
AND s.code <> 'PICKER_DESIGNED')
OR pb.problem LIKE '%RIESGO%';
- -- Comprobamos si hay tickets asignados. En ese caso, nos centramos
- -- exclusivamente en esos tickets y los sacamos independientemente
- -- de problemas o tamaños
- SELECT COUNT(*) INTO vHasAssignedTickets
- FROM tmp.productionBuffer pb
- JOIN state s ON s.id = pb.state
- WHERE s.code = 'PICKER_DESIGNED'
- AND pb.workerCode = vWorkerCode;
+ -- If there are assigned tickets, focus exclusively on those tickets
+ -- and take them out regardless of problems or sizes
- -- Se dejan en la tabla tmp.productionBuffer sólo aquellos tickets adecuados
- IF vHasAssignedTickets THEN
- DELETE pb.*
+ SELECT EXISTS (
+ SELECT TRUE
FROM tmp.productionBuffer pb
JOIN state s ON s.id = pb.state
- WHERE s.code <> 'PICKER_DESIGNED'
- OR pb.workerCode <> vWorkerCode;
+ WHERE (s.code = 'PICKER_DESIGNED'
+ AND pb.workerCode = vWorkerCode)
+ OR s.code = 'LAST_CALL'
+ ) INTO vHasAssignedTickets;
+
+ -- Keep in tmp.productionBuffer only the suitable tickets
+
+ IF vHasAssignedTickets THEN
+ DELETE pb
+ FROM tmp.productionBuffer pb
+ JOIN state s ON s.id = pb.state
+ WHERE (s.code <> 'PICKER_DESIGNED'
+ OR pb.workerCode <> vWorkerCode)
+ AND s.code <> 'LAST_CALL';
ELSE
- DELETE pb.*
+ DELETE pb
FROM tmp.productionBuffer pb
JOIN state s ON s.id = pb.state
JOIN agencyMode am ON am.id = pb.agencyModeFk
@@ -193,26 +166,25 @@ BEGIN
OR (NOT pb.H AND pb.V > 0 AND vItemPackingTypeFk = 'H')
OR (NOT pb.V AND vItemPackingTypeFk = 'V')
OR (pc.isPreviousPreparationRequired AND pb.previousWithoutParking)
- OR LENGTH(pb.problem) > 0
+ OR LENGTH(pb.problem)
OR pb.lines > vLinesLimit
OR pb.m3 > vVolumeLimit
OR sub.maxSize > vSizeLimit
OR pb.hasPlantTray;
END IF;
- -- Es importante que el primer ticket se coja en todos los casos
- SELECT ticketFk,
- HH,
- mm,
- `lines`,
- m3
- INTO vFirstTicketFk,
- vHour,
- vMinute,
- vTicketLines,
- vTicketVolume
+ -- Exclude tickets whose preparation time differs, when applicable
+
+ IF vHasUniqueCollectionTime THEN
+ DELETE pb
+ FROM tmp.productionBuffer pb
+ JOIN tmp.productionBuffer pb2 ON pb2.ticketFk = vFirstTicketFk
+ AND (pb.HH <> pb2.HH OR pb.mm <> pb2.mm);
+ END IF;
+
+ SELECT ticketFk INTO vFirstTicketFk
FROM tmp.productionBuffer
- ORDER BY HH,
+ ORDER BY HH,
mm,
productionOrder DESC,
m3 DESC,
@@ -222,44 +194,37 @@ BEGIN
ticketFk
LIMIT 1;
- -- Hay que excluir aquellos que no tengan la misma hora de preparacion, si procede
- IF vHasUniqueCollectionTime THEN
- DELETE FROM tmp.productionBuffer
- WHERE HH <> vHour
- OR mm <> vMinute;
- END IF;
-
- SET vTicketFk = vFirstTicketFk;
- SET @lines = 0;
- SET @volume = 0;
-
- OPEN c1;
- read_loop: LOOP
+ OPEN vTickets;
+ l: LOOP
SET vDone = FALSE;
+ FETCH vTickets INTO vTicketFk, vTicketLines, vTicketVolume;
+
+ IF vDone THEN
+ LEAVE l;
+ END IF;
-- Buscamos un ticket que cumpla con los requisitos en el listado
- IF ((vTicketLines + @lines) <= vLinesLimit OR vLinesLimit IS NULL)
- AND ((vTicketVolume + @volume) <= vVolumeLimit OR vVolumeLimit IS NULL) THEN
+
+ IF (vLinesLimit IS NULL OR (vTotalLines + vTicketLines) <= vLinesLimit)
+ AND (vVolumeLimit IS NULL OR (vTotalVolume + vTicketVolume) <= vVolumeLimit) THEN
CALL ticket_splitItemPackingType(vTicketFk, vItemPackingTypeFk);
DROP TEMPORARY TABLE tmp.ticketIPT;
+ SELECT COUNT(*), SUM(litros), MAX(i.`size`), SUM(sv.volume)
+ INTO vLines, vLiters, vHeight, vVolume
+ FROM saleVolume sv
+ JOIN sale s ON s.id = sv.saleFk
+ JOIN item i ON i.id = s.itemFk
+ WHERE sv.ticketFk = vTicketFk;
+
+ SET vTotalVolume = vTotalVolume + vVolume,
+ vTotalLines = vTotalLines + vLines;
+
UPDATE tmp.productionBuffer pb
- JOIN (
- SELECT SUM(litros) liters,
- @lines:= COUNT(*) + @lines,
- COUNT(*) `lines`,
- MAX(i.`size`) height,
- @volume := SUM(sv.volume) + @volume,
- SUM(sv.volume) volume
- FROM saleVolume sv
- JOIN sale s ON s.id = sv.saleFk
- JOIN item i ON i.id = s.itemFk
- WHERE sv.ticketFk = vTicketFk
- ) sub
- SET pb.liters = sub.liters,
- pb.`lines` = sub.`lines`,
- pb.height = sub.height
+ SET pb.liters = vLiters,
+ pb.`lines` = vLines,
+ pb.height = vHeight
WHERE pb.ticketFk = vTicketFk;
UPDATE tTrain tt
@@ -276,17 +241,13 @@ BEGIN
tt.height
LIMIT 1;
- -- Si no le encuentra una balda adecuada, intentamos darle un carro entero si queda alguno libre
+ -- If no suitable shelf is found, try to give it a whole free trolley
+
IF NOT (SELECT COUNT(*) FROM tTrain WHERE ticketFk) THEN
- SELECT tt.wagon
- INTO vFreeWagonFk
- FROM tTrain tt
- LEFT JOIN (
- SELECT DISTINCT wagon
- FROM tTrain
- WHERE ticketFk IS NOT NULL
- ) nn ON nn.wagon = tt.wagon
- WHERE nn.wagon IS NULL
+ SELECT wagon INTO vFreeWagonFk
+ FROM tTrain
+ GROUP BY wagon
+ HAVING COUNT(ticketFk) = 0
ORDER BY wagon
LIMIT 1;
@@ -295,38 +256,35 @@ BEGIN
SET ticketFk = vFirstTicketFk
WHERE wagon = vFreeWagonFk;
- -- Se anulan el resto de carros libres para que sólo uno lleve un pedido excesivo
- DELETE tt.*
- FROM tTrain tt
- LEFT JOIN (
- SELECT DISTINCT wagon
- FROM tTrain
- WHERE ticketFk IS NOT NULL
- ) nn ON nn.wagon = tt.wagon
- WHERE nn.wagon IS NULL;
- END IF;
- END IF;
+ -- Cancel the remaining free trolleys so that,
+ -- at most, one trolley carries an oversized order
- FETCH c1 INTO vTicketFk, vTicketLines, vTicketVolume;
- IF vDone OR NOT (SELECT COUNT(*) FROM tTrain WHERE ticketFk IS NULL) THEN
- LEAVE read_loop;
- END IF;
- ELSE
- FETCH c1 INTO vTicketFk, vTicketLines, vTicketVolume;
- IF vDone THEN
- LEAVE read_loop;
- END IF;
+ DELETE tt
+ FROM tTrain tt
+ JOIN (SELECT wagon
+ FROM tTrain
+ GROUP BY wagon
+ HAVING COUNT(ticketFk) = 0
+ ) sub ON sub.wagon = tt.wagon;
+ END IF;
+ END IF;
END IF;
END LOOP;
- CLOSE c1;
+ CLOSE vTickets;
IF (SELECT COUNT(*) FROM tTrain WHERE ticketFk) THEN
- UPDATE collection c
- JOIN state st ON st.code = 'ON_PREPARATION'
- SET c.stateFk = st.id
- WHERE c.id = vCollectionFk;
+ -- Get the collection number
+
+ INSERT INTO collection
+ SET itemPackingTypeFk = vItemPackingTypeFk,
+ trainFk = vTrainFk,
+ wagons = vWagons,
+ warehouseFk = vWarehouseFk;
+
+ SELECT LAST_INSERT_ID() INTO vCollectionFk;
-- Asigna las bandejas
+
INSERT IGNORE INTO ticketCollection(ticketFk, collectionFk, `level`, wagon, liters)
SELECT tt.ticketFk, vCollectionFk, tt.shelve, tt.wagon, tt.liters
FROM tTrain tt
@@ -334,39 +292,36 @@ BEGIN
ORDER BY tt.wagon, tt.shelve;
-- Actualiza el estado de los tickets
- CALL collection_setState(vCollectionFk, vStateFk);
+
+ CALL collection_setState(vCollectionFk, vStateCode);
-- Aviso para la preparacion previa
+
INSERT INTO ticketDown(ticketFk, collectionFk)
SELECT tc.ticketFk, tc.collectionFk
FROM ticketCollection tc
WHERE tc.collectionFk = vCollectionFk;
- CALL sales_mergeByCollection(vCollectionFk);
+ CALL collection_mergeSales(vCollectionFk);
UPDATE `collection` c
- JOIN (
+ JOIN(
SELECT COUNT(*) saleTotalCount,
SUM(s.isPicked <> 0) salePickedCount
FROM ticketCollection tc
JOIN sale s ON s.ticketFk = tc.ticketFk
- WHERE tc.collectionFk = vCollectionFk
- AND s.quantity > 0
- ) sub
+ WHERE tc.collectionFk = vCollectionFk
+ AND s.quantity > 0
+ )sub
SET c.saleTotalCount = sub.saleTotalCount,
c.salePickedCount = sub.salePickedCount
WHERE c.id = vCollectionFk;
-
ELSE
- DELETE FROM `collection`
- WHERE id = vCollectionFk;
- SET vCollectionFk = NULL;
+ SET vCollectionFk = NULL;
END IF;
- DO RELEASE_LOCK(vLockName);
-
DROP TEMPORARY TABLE
tTrain,
tmp.productionBuffer;
END$$
-DELIMITER ;
+DELIMITER ;
\ No newline at end of file
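
The WHILE loop that filled tTrain is replaced by a recursive CTE; a standalone sketch of the sequence it produces, assuming vWagonCounter = 1 and vWagons = 3 (the join on collectionVolumetry then adds the shelves for each wagon):

    WITH RECURSIVE wagonSequence AS (
        SELECT 1 wagon
        UNION ALL
        SELECT wagon + 1 FROM wagonSequence WHERE wagon < 1 + 3 - 1
    )
    SELECT wagon FROM wagonSequence; -- 1, 2, 3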
diff --git a/db/routines/vn/procedures/invoiceOut_newFromAddress.sql b/db/routines/vn/procedures/invoiceOut_newFromAddress.sql
new file mode 100644
index 000000000..495ace608
--- /dev/null
+++ b/db/routines/vn/procedures/invoiceOut_newFromAddress.sql
@@ -0,0 +1,56 @@
+DELIMITER $$
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`invoiceOut_newFromAddress`(
+ IN vAddressFk INT,
+ IN vSerial CHAR(2),
+ IN vMaxShipped DATE,
+ IN vCompanyFk INT,
+ IN vTaxArea VARCHAR(25),
+ IN vRef VARCHAR(25),
+ OUT vInvoiceId INT)
+BEGIN
+/**
+ * Invoices the tickets of a consignee (address) up to a given date
+ * @param vAddressFk Id of the address to invoice
+ * @param vSerial Invoice serial
+ * @param vMaxShipped Date up to which tickets are taken for invoicing
+ * @param vCompanyFk Id of the company issuing the invoice
+ * @param vTaxArea Tax area relative to the company and the client, NULL by default
+ * @param vRef Invoice reference when it must be forced, NULL by default
+ * @return vInvoiceId The invoice id
+ */
+ DECLARE vIsRefEditable BOOLEAN;
+
+ IF vRef IS NOT NULL AND vSerial IS NOT NULL THEN
+ SELECT isRefEditable INTO vIsRefEditable
+ FROM invoiceOutSerial
+ WHERE code = vSerial;
+
+ IF NOT vIsRefEditable THEN
+ CALL util.throw('serial non editable');
+ END IF;
+ END IF;
+
+ DROP TEMPORARY TABLE IF EXISTS `tmp`.`ticketToInvoice`;
+ CREATE TEMPORARY TABLE `tmp`.`ticketToInvoice`
+ (PRIMARY KEY (`id`))
+ ENGINE = MEMORY
+ SELECT id FROM ticket t
+ WHERE t.addressFk = vAddressFk
+ AND t.refFk IS NULL
+ AND t.companyFk = vCompanyFk
+ AND t.shipped BETWEEN
+ util.firstDayOfYear(vMaxShipped - INTERVAL 1 YEAR)
+ AND util.dayend(vMaxShipped);
+
+ CALL invoiceOut_new(vSerial, util.VN_CURDATE(), vTaxArea, vInvoiceId);
+
+ UPDATE invoiceOut
+ SET `ref` = vRef
+ WHERE id = vInvoiceId
+ AND vRef IS NOT NULL;
+
+ IF vSerial <> 'R' AND NOT ISNULL(vInvoiceId) AND vInvoiceId <> 0 THEN
+ CALL invoiceOutBooking(vInvoiceId);
+ END IF;
+END$$
+DELIMITER ;
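
A call sketch with made-up arguments: invoice the pending tickets of address 121 up to today, on serial 'A', from company 442, letting the serial keep its own numbering:

    CALL vn.invoiceOut_newFromAddress(
        121,                -- vAddressFk (hypothetical)
        'A',                -- vSerial
        util.VN_CURDATE(),  -- vMaxShipped
        442,                -- vCompanyFk (hypothetical)
        NULL,               -- vTaxArea (NULL by default)
        NULL,               -- vRef, no forced reference
        @invoiceId);
    SELECT @invoiceId;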
diff --git a/db/routines/vn/procedures/itemMinimumQuantity_check.sql b/db/routines/vn/procedures/itemMinimumQuantity_check.sql
index fef7cdbdb..a4b15b90a 100644
--- a/db/routines/vn/procedures/itemMinimumQuantity_check.sql
+++ b/db/routines/vn/procedures/itemMinimumQuantity_check.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`itemMinimumQuantity_check`(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`itemMinimumQuantity_check`(
vSelf INT,
vItemFk INT,
vStarted DATE,
diff --git a/db/routines/vn/procedures/itemShelvingSale_addBySaleGroup.sql b/db/routines/vn/procedures/itemShelvingSale_addBySaleGroup.sql
index 285b9f93f..08d09c63e 100644
--- a/db/routines/vn/procedures/itemShelvingSale_addBySaleGroup.sql
+++ b/db/routines/vn/procedures/itemShelvingSale_addBySaleGroup.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`itemShelvingSale_addBySaleGroup`(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`itemShelvingSale_addBySaleGroup`(
vSaleGroupFk INT(11)
)
BEGIN
diff --git a/db/routines/vn/procedures/itemShelving_addList.sql b/db/routines/vn/procedures/itemShelving_addList.sql
index 05b392485..ade92b9fd 100644
--- a/db/routines/vn/procedures/itemShelving_addList.sql
+++ b/db/routines/vn/procedures/itemShelving_addList.sql
@@ -1,16 +1,22 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`itemShelving_addList`(vShelvingFk VARCHAR(3), vList TEXT, vIsChecking BOOL, vWarehouseFk INT)
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`itemShelving_addList`(
+ vShelvingFk VARCHAR(3),
+ vList TEXT,
+ vIsChecking BOOL,
+ vWarehouseFk INT
+)
BEGIN
-/* Recorre cada elemento en la colección vList.
+/**
+ * Recorre cada elemento en la colección vList.
* Si el parámetro isChecking = FALSE, llama a itemShelving_add.
*
* Cuando es TRUE sólo inserta los elementos de la colección que no están ya en
- * ese shelving, actualizando los valores del campo vn.itemShelving.isChecked
+ * ese shelving, actualizando los valores del campo itemShelving.isChecked
*
- * param vShelvingFk Identificador de vn.shelving
- * param vList JSON array con esta estructura: '[value1, value2, ...]'
- * param vIsChecking Define si hay que añadir o comprobar los items
- * param vWarehouseFk Identificador de vn.warehouse
+ * @param vShelvingFk Shelving identifier
+ * @param vList JSON array with this structure: '[value1, value2, ...]'
+ * @param vIsChecking Whether to add the items or just check them
+ * @param vWarehouseFk Warehouse identifier
*/
DECLARE vListLength INT DEFAULT JSON_LENGTH(vList);
DECLARE vCounter INT DEFAULT 0;
@@ -20,26 +26,27 @@ BEGIN
DECLARE vIsChecked BOOL;
WHILE vCounter < vListLength DO
- SET vPath = CONCAT('$[',vCounter,']');
- SET vBarcode = JSON_EXTRACT(vList,vPath);
+ SET vPath = CONCAT('$[', vCounter, ']');
+ SET vBarcode = JSON_EXTRACT(vList, vPath);
SET vIsChecked = NULL;
IF vIsChecking THEN
SELECT barcodeToItem(vBarcode) INTO vItemFk;
- SELECT COUNT(*) INTO vIsChecked
- FROM vn.itemShelving
+ SELECT IF(COUNT(*), TRUE, FALSE) INTO vIsChecked
+ FROM itemShelving
WHERE shelvingFk COLLATE utf8_unicode_ci = vShelvingFk
AND itemFk = vItemFk;
END IF;
- IF NOT (vIsChecking AND vIsChecked) THEN
- CALL vn.itemShelving_add(vShelvingFk, vBarcode, 1, NULL, NULL, NULL, vWarehouseFk);
+ IF NOT vIsChecking OR NOT vIsChecked THEN
+ CALL itemShelving_add(vShelvingFk, vBarcode, 1, NULL, NULL, NULL, vWarehouseFk);
END IF;
- UPDATE vn.itemShelving
+ UPDATE itemShelving
SET isChecked = vIsChecked
WHERE shelvingFk COLLATE utf8_unicode_ci = vShelvingFk
- AND itemFk = vItemFk AND isChecked IS NULL;
+ AND itemFk = vItemFk
+ AND isChecked IS NULL;
SET vCounter = vCounter + 1;
END WHILE;
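
Usage sketch, with a made-up shelving code and scanned barcodes; with vIsChecking = TRUE only items not yet on that shelving are added and isChecked is refreshed:

    CALL vn.itemShelving_addList('UXN', '[1111111, 2222222]', TRUE, 1);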
diff --git a/db/routines/vn/procedures/item_getSimilar.sql b/db/routines/vn/procedures/item_getSimilar.sql
index b524e30a7..56afd92e9 100644
--- a/db/routines/vn/procedures/item_getSimilar.sql
+++ b/db/routines/vn/procedures/item_getSimilar.sql
@@ -1,10 +1,10 @@
DELIMITER $$
CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`item_getSimilar`(
- vSelf INT,
- vWarehouseFk INT,
- vDated DATE,
- vShowType BOOL,
- vDaysInForward INT
+ vSelf INT,
+ vWarehouseFk INT,
+ vDated DATE,
+ vShowType BOOL,
+ vDaysInForward INT
)
BEGIN
/**
@@ -17,48 +17,48 @@ BEGIN
* @param vShowType Mostrar tipos
* @param vDaysInForward Días de alcance para las ventas
*/
- DECLARE vAvailableCalcFk INT;
- DECLARE vPriority INT DEFAULT 1;
+ DECLARE vAvailableCalcFk INT;
+ DECLARE vPriority INT DEFAULT 1;
- CALL cache.available_refresh(vAvailableCalcFk, FALSE, vWarehouseFk, vDated);
+ CALL cache.available_refresh(vAvailableCalcFk, FALSE, vWarehouseFk, vDated);
- WITH itemTags AS (
- SELECT i.id,
- typeFk,
- tag5,
- value5,
- tag6,
- value6,
- tag7,
- value7,
- tag8,
- value8,
- t.name,
- it.value
+ WITH itemTags AS (
+ SELECT i.id,
+ typeFk,
+ tag5,
+ value5,
+ tag6,
+ value6,
+ tag7,
+ value7,
+ tag8,
+ value8,
+ t.name,
+ it.value
FROM vn.item i
LEFT JOIN vn.itemTag it ON it.itemFk = i.id
AND it.priority = vPriority
LEFT JOIN vn.tag t ON t.id = it.tagFk
WHERE i.id = vSelf
- ),
- stock AS (
- SELECT itemFk, SUM(visible) stock
+ ),
+ stock AS (
+ SELECT itemFk, SUM(visible) stock
FROM vn.itemShelvingStock
WHERE warehouseFk = vWarehouseFk
GROUP BY itemFk
- ),
- sold AS (
- SELECT SUM(s.quantity) quantity, s.itemFk
+ ),
+ sold AS (
+ SELECT SUM(s.quantity) quantity, s.itemFk
FROM vn.sale s
JOIN vn.ticket t ON t.id = s.ticketFk
LEFT JOIN vn.itemShelvingSale iss ON iss.saleFk = s.id
- WHERE t.shipped BETWEEN CURDATE() AND CURDATE() + INTERVAL vDaysInForward DAY
+ WHERE t.shipped >= CURDATE() + INTERVAL vDaysInForward DAY
AND iss.saleFk IS NULL
AND t.warehouseFk = vWarehouseFk
GROUP BY s.itemFk
- )
- SELECT i.id itemFk,
- CAST(sd.quantity AS INT) advanceable,
+ )
+ SELECT i.id itemFk,
+ LEAST(CAST(sd.quantity AS INT), sk.stock) advanceable,
i.longName,
i.subName,
i.tag5,
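
Call sketch with illustrative arguments: similar items for item 1 in warehouse 1, for today, showing other types, with a two-day sales horizon; advanceable is now capped by the stock actually on shelves:

    CALL vn.item_getSimilar(1, 1, util.VN_CURDATE(), TRUE, 2);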
diff --git a/db/routines/vn/procedures/productionControl.sql b/db/routines/vn/procedures/productionControl.sql
index 0560cdd7e..1d206e20d 100644
--- a/db/routines/vn/procedures/productionControl.sql
+++ b/db/routines/vn/procedures/productionControl.sql
@@ -15,13 +15,11 @@ proc: BEGIN
DECLARE vEndingDate DATETIME;
DECLARE vIsTodayRelative BOOLEAN;
- SELECT util.dayEnd(util.VN_CURDATE()) + INTERVAL LEAST(vScopeDays, maxProductionScopeDays) DAY
- INTO vEndingDate
- FROM productionConfig;
-
- SELECT isTodayRelative INTO vIsTodayRelative
- FROM worker
- WHERE id = getUser(); -- Cambiar por account.myUser_getId(), falta dar permisos
+ SELECT w.isTodayRelative, util.dayEnd(util.VN_CURDATE()) + INTERVAL LEAST(vScopeDays, pc.maxProductionScopeDays) DAY
+ INTO vIsTodayRelative,vEndingDate
+ FROM worker w
+ JOIN productionConfig pc
+ WHERE w.id = account.myUser_getId();
CALL prepareTicketList(util.yesterday(), vEndingDate);
@@ -268,15 +266,14 @@ proc: BEGIN
UPDATE tmp.productionBuffer pb
JOIN sale s ON s.ticketFk = pb.ticketFk
JOIN item i ON i.id = s.itemFk
- JOIN cache.last_buy lb ON lb.warehouse_id = vWarehouseFk
+ JOIN cache.last_buy lb ON lb.warehouse_id = vWarehouseFk
AND lb.item_id = s.itemFk
JOIN buy b ON b.id = lb.buy_id
JOIN packaging p ON p.id = b.packagingFk
- JOIN productionConfig pc
SET pb.hasPlantTray = TRUE
WHERE p.isPlantTray
AND s.quantity >= b.packing
- AND pb.isOwn;
+ AND pb.isOwn;
DROP TEMPORARY TABLE
tmp.productionTicket,
diff --git a/db/routines/vn/procedures/stockBought_calculate.sql b/db/routines/vn/procedures/stockBought_calculate.sql
index 0930a86de..8b2a32e5d 100644
--- a/db/routines/vn/procedures/stockBought_calculate.sql
+++ b/db/routines/vn/procedures/stockBought_calculate.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`stockBought_calculate`(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`stockBought_calculate`(
vDated DATE
)
proc: BEGIN
@@ -14,8 +14,8 @@ proc: BEGIN
END IF;
CREATE OR REPLACE TEMPORARY TABLE tStockBought
- SELECT workerFk, reserve
- FROM stockBought
+ SELECT workerFk, reserve
+ FROM stockBought
WHERE dated = vDated
AND reserve;
@@ -35,7 +35,7 @@ proc: BEGIN
LEFT JOIN tmp.item ti ON ti.itemFk = i.id
JOIN itemCategory ic ON ic.id = it.categoryFk
JOIN warehouse wh ON wh.code = 'VNH'
- JOIN tmp.buyUltimate bu ON bu.itemFk = i.id
+ JOIN tmp.buyUltimate bu ON bu.itemFk = i.id
AND bu.warehouseFk = wh.id
JOIN buy b ON b.id = bu.buyFk
JOIN volumeConfig vc
diff --git a/db/routines/vn/procedures/stockBuyedByWorker.sql b/db/routines/vn/procedures/stockBuyedByWorker.sql
deleted file mode 100644
index 13bda0133..000000000
--- a/db/routines/vn/procedures/stockBuyedByWorker.sql
+++ /dev/null
@@ -1,74 +0,0 @@
-DELIMITER $$
-CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`stockBuyedByWorker`(
- vDated DATE,
- vWorker INT
-)
-BEGIN
-/**
- * Inserta el volumen de compra de un comprador
- * en stockBuyed de acuerdo con la fecha.
- *
- * @param vDated Fecha de compra
- * @param vWorker Id de trabajador
- */
- CREATE OR REPLACE TEMPORARY TABLE tStockBuyed
- (INDEX (userFk))
- ENGINE = MEMORY
- SELECT requested, reserved, userFk
- FROM stockBuyed
- WHERE dated = vDated
- AND userFk = vWorker;
-
- DELETE FROM stockBuyed
- WHERE dated = vDated
- AND userFk = vWorker;
-
- CALL item_calculateStock(vDated);
-
- INSERT INTO stockBuyed(userFk, buyed, `dated`, reserved, requested, description)
- SELECT it.workerFk,
- SUM((ti.quantity / b.packing) * buy_getVolume(b.id)) / vc.palletM3 / 1000000,
- vDated,
- sb.reserved,
- sb.requested,
- u.name
- FROM itemType it
- JOIN item i ON i.typeFk = it.id
- LEFT JOIN tmp.item ti ON ti.itemFk = i.id
- JOIN itemCategory ic ON ic.id = it.categoryFk
- JOIN warehouse wh ON wh.code = 'VNH'
- JOIN tmp.buyUltimate bu ON bu.itemFk = i.id
- AND bu.warehouseFk = wh.id
- JOIN buy b ON b.id = bu.buyFk
- JOIN volumeConfig vc
- JOIN account.`user` u ON u.id = it.workerFk
- LEFT JOIN tStockBuyed sb ON sb.userFk = it.workerFk
- WHERE ic.display
- AND it.workerFk = vWorker;
-
- SELECT b.entryFk Id_Entrada,
- i.id Id_Article,
- i.name Article,
- ti.quantity Cantidad,
- (ac.conversionCoefficient * (ti.quantity / b.packing) * buy_getVolume(b.id))
- / (vc.trolleyM3 * 1000000) buyed,
- b.packagingFk id_cubo,
- b.packing
- FROM tmp.item ti
- JOIN item i ON i.id = ti.itemFk
- JOIN itemType it ON i.typeFk = it.id
- JOIN itemCategory ic ON ic.id = it.categoryFk
- JOIN worker w ON w.id = it.workerFk
- JOIN auctionConfig ac
- JOIN tmp.buyUltimate bu ON bu.itemFk = i.id
- AND bu.warehouseFk = ac.warehouseFk
- JOIN buy b ON b.id = bu.buyFk
- JOIN volumeConfig vc
- WHERE ic.display
- AND w.id = vWorker;
-
- DROP TEMPORARY TABLE tmp.buyUltimate,
- tmp.item,
- tStockBuyed;
-END$$
-DELIMITER ;
diff --git a/db/routines/vn/procedures/stockBuyed_add.sql b/db/routines/vn/procedures/stockBuyed_add.sql
deleted file mode 100644
index aab85e7fa..000000000
--- a/db/routines/vn/procedures/stockBuyed_add.sql
+++ /dev/null
@@ -1,70 +0,0 @@
-DELIMITER $$
-CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`stockBuyed_add`(
- vDated DATE
-)
-BEGIN
-/**
- * Inserta el volumen de compra por comprador
- * en stockBuyed de acuerdo con la fecha.
- *
- * @param vDated Fecha de compra
- */
- CREATE OR REPLACE TEMPORARY TABLE tStockBuyed
- (INDEX (userFk))
- ENGINE = MEMORY
- SELECT requested, reserved, userFk
- FROM stockBuyed
- WHERE dated = vDated;
-
- DELETE FROM stockBuyed WHERE dated = vDated;
-
- CALL item_calculateStock(vDated);
-
- INSERT INTO stockBuyed(userFk, buyed, `dated`, description)
- SELECT it.workerFk,
- SUM((ti.quantity / b.packing) * buy_getVolume(b.id)) / vc.palletM3 / 1000000,
- vDated,
- u.name
- FROM itemType it
- JOIN item i ON i.typeFk = it.id
- LEFT JOIN tmp.item ti ON ti.itemFk = i.id
- JOIN itemCategory ic ON ic.id = it.categoryFk
- JOIN warehouse wh ON wh.code = 'VNH'
- JOIN tmp.buyUltimate bu ON bu.itemFk = i.id AND bu.warehouseFk = wh.id
- JOIN buy b ON b.id = bu.buyFk
- JOIN volumeConfig vc
- JOIN account.`user` u ON u.id = it.workerFk
- JOIN workerDepartment wd ON wd.workerFk = u.id
- JOIN department d ON d.id = wd.departmentFk
- WHERE ic.display
- AND d.code IN ('shopping', 'logistic', 'franceTeam')
- GROUP BY it.workerFk;
-
- INSERT INTO stockBuyed(buyed, dated, description)
- SELECT SUM(ic.cm3 * ito.quantity / vc.palletM3 / 1000000),
- vDated,
- IF(c.code = 'ES', p.name, c.name) destiny
- FROM itemTicketOut ito
- JOIN ticket t ON t.id = ito.ticketFk
- JOIN `address` a ON a.id = t.addressFk
- JOIN province p ON p.id = a.provinceFk
- JOIN country c ON c.id = p.countryFk
- JOIN warehouse wh ON wh.id = t.warehouseFk
- JOIN itemCost ic ON ic.itemFk = ito.itemFk
- AND ic.warehouseFk = t.warehouseFk
- JOIN volumeConfig vc
- WHERE ito.shipped BETWEEN vDated AND util.dayend(vDated)
- AND wh.code = 'VNH'
- GROUP BY destiny;
-
- UPDATE stockBuyed s
- JOIN tStockBuyed ts ON ts.userFk = s.userFk
- SET s.requested = ts.requested,
- s.reserved = ts.reserved
- WHERE s.dated = vDated;
-
- DROP TEMPORARY TABLE tmp.buyUltimate,
- tmp.item,
- tStockBuyed;
-END$$
-DELIMITER ;
diff --git a/db/routines/vn/procedures/supplier_statementWithEntries.sql b/db/routines/vn/procedures/supplier_statementWithEntries.sql
index 55b271296..c0014f8e5 100644
--- a/db/routines/vn/procedures/supplier_statementWithEntries.sql
+++ b/db/routines/vn/procedures/supplier_statementWithEntries.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE vn.supplier_statementWithEntries(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE vn.supplier_statementWithEntries(
vSupplierFk INT,
vCurrencyFk INT,
vCompanyFk INT,
diff --git a/db/routines/vn/procedures/ticketRefund_upsert.sql b/db/routines/vn/procedures/ticketRefund_upsert.sql
new file mode 100644
index 000000000..fb22e6e8c
--- /dev/null
+++ b/db/routines/vn/procedures/ticketRefund_upsert.sql
@@ -0,0 +1,25 @@
+DELIMITER $$
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`ticketRefund_upsert`(
+ vRefundTicketFk INT,
+ vOriginalTicketFk INT
+)
+ READS SQL DATA
+BEGIN
+/**
+ * Common code for ticketRefund triggers
+ *
+ * @param vRefundTicketFk
+ * @param vOriginalTicketFk
+ */
+ DECLARE vIsDeleted BOOL;
+
+ SELECT COUNT(*) INTO vIsDeleted
+ FROM ticket
+ WHERE id IN (vRefundTicketFk, vOriginalTicketFk)
+ AND isDeleted;
+
+ IF vIsDeleted THEN
+ CALL util.throw('Refund tickets cannot be related to deleted tickets');
+ END IF;
+END$$
+DELIMITER ;
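
The guard both ticketRefund triggers are expected to share; a sketch with hypothetical ticket ids:

    CALL vn.ticketRefund_upsert(1001, 1000); -- throws if either ticket is deleted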
diff --git a/db/routines/vn/procedures/ticket_mergeSales.sql b/db/routines/vn/procedures/ticket_mergeSales.sql
index 28b2dc1c0..8ef5f6d90 100644
--- a/db/routines/vn/procedures/ticket_mergeSales.sql
+++ b/db/routines/vn/procedures/ticket_mergeSales.sql
@@ -1,14 +1,27 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`ticket_mergeSales`(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`ticket_mergeSales`(
vSelf INT
)
BEGIN
+/**
+ * For a given ticket, merges the sale lines of the same item into a single line,
+ * as long as they share the same price and discount.
+ *
+ * @param vSelf Ticket id
+ */
+ DECLARE vHasSalesToMerge BOOL;
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
ROLLBACK;
RESIGNAL;
END;
+ START TRANSACTION;
+
+ SELECT id INTO vSelf
+ FROM ticket
+ WHERE id = vSelf FOR UPDATE;
+
CREATE OR REPLACE TEMPORARY TABLE tSalesToPreserve
(PRIMARY KEY (id))
ENGINE = MEMORY
@@ -18,26 +31,24 @@ BEGIN
JOIN itemType it ON it.id = i.typeFk
WHERE s.ticketFk = vSelf
AND it.isMergeable
- GROUP BY s.itemFk, s.price, s.discount;
+ GROUP BY s.itemFk, s.price, s.discount
+ HAVING COUNT(*) > 1;
- START TRANSACTION;
+ SELECT COUNT(*) INTO vHasSalesToMerge FROM tSalesToPreserve;
- UPDATE sale s
- JOIN tSalesToPreserve stp ON stp.id = s.id
- SET s.quantity = newQuantity
- WHERE s.ticketFk = vSelf;
+ IF vHasSalesToMerge THEN
+ UPDATE sale s
+ JOIN tSalesToPreserve stp ON stp.id = s.id
+ SET s.quantity = newQuantity;
- DELETE s.*
- FROM sale s
- LEFT JOIN tSalesToPreserve stp ON stp.id = s.id
- JOIN item i ON i.id = s.itemFk
- JOIN itemType it ON it.id = i.typeFk
- WHERE s.ticketFk = vSelf
- AND stp.id IS NULL
- AND it.isMergeable;
+ DELETE s
+ FROM sale s
+ JOIN tSalesToPreserve stp ON stp.itemFk = s.itemFk
+ WHERE s.ticketFk = vSelf
+ AND s.id <> stp.id;
+ END IF;
COMMIT;
-
DROP TEMPORARY TABLE tSalesToPreserve;
END$$
DELIMITER ;
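
A worked sketch of the merge with made-up sale lines of a mergeable item that share price and discount: one of the two lines survives with the summed quantity, the other is deleted.

    -- before, on ticket 100: (saleFk 1, item 7, qty 5, price 1.50, discount 0)
    --                        (saleFk 2, item 7, qty 3, price 1.50, discount 0)
    CALL vn.ticket_mergeSales(100);
    -- after:                 (saleFk 1, item 7, qty 8, price 1.50, discount 0)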
diff --git a/db/routines/vn/procedures/ticket_setProblemRiskByClient.sql b/db/routines/vn/procedures/ticket_setProblemRiskByClient.sql
index 8479550de..1652fd29e 100644
--- a/db/routines/vn/procedures/ticket_setProblemRiskByClient.sql
+++ b/db/routines/vn/procedures/ticket_setProblemRiskByClient.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`ticket_setProblemRiskByClient`(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`ticket_setProblemRiskByClient`(
vClientFk INT
)
BEGIN
diff --git a/db/routines/vn/procedures/ticket_setVolume.sql b/db/routines/vn/procedures/ticket_setVolume.sql
index d0fe9740c..c3cf0d057 100644
--- a/db/routines/vn/procedures/ticket_setVolume.sql
+++ b/db/routines/vn/procedures/ticket_setVolume.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`ticket_setVolume`(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`ticket_setVolume`(
vSelf INT
)
BEGIN
diff --git a/db/routines/vn/procedures/ticket_setVolumeItemCost.sql b/db/routines/vn/procedures/ticket_setVolumeItemCost.sql
index d7fb4473d..3c23b7c34 100644
--- a/db/routines/vn/procedures/ticket_setVolumeItemCost.sql
+++ b/db/routines/vn/procedures/ticket_setVolumeItemCost.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `vn`.`ticket_setVolumeItemCost`(
+CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`ticket_setVolumeItemCost`(
vItemFk INT
)
BEGIN
diff --git a/db/routines/vn/procedures/ticket_splitItemPackingType.sql b/db/routines/vn/procedures/ticket_splitItemPackingType.sql
index 0ee865af5..9a4bc01eb 100644
--- a/db/routines/vn/procedures/ticket_splitItemPackingType.sql
+++ b/db/routines/vn/procedures/ticket_splitItemPackingType.sql
@@ -3,124 +3,87 @@ CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`ticket_splitItemPacki
vSelf INT,
vOriginalItemPackingTypeFk VARCHAR(1)
)
-BEGIN
+proc:BEGIN
/**
- * Clona y reparte las ventas de un ticket en funcion del tipo de empaquetado.
- * Respeta el id inicial para el tipo propuesto.
+ * Clones and distributes the sale lines of a ticket according to their packing type.
+ * The original ticket id is kept for the proposed packing type.
*
* @param vSelf Id ticket
- * @param vOriginalItemPackingTypeFk Tipo para el que se reserva el número de ticket original
+ * @param vOriginalItemPackingTypeFk Packing type kept on the original ticket
* @return table tmp.ticketIPT(ticketFk, itemPackingTypeFk)
*/
- DECLARE vItemPackingTypeFk VARCHAR(1) DEFAULT 'H';
- DECLARE vNewTicketFk INT;
- DECLARE vPackingTypesToSplit INT;
DECLARE vDone INT DEFAULT FALSE;
+ DECLARE vHasItemPackingType BOOL;
+ DECLARE vItemPackingTypeFk INT;
+ DECLARE vNewTicketFk INT;
- DECLARE vSaleGroup CURSOR FOR
- SELECT itemPackingTypeFk
- FROM tSaleGroup
- WHERE itemPackingTypeFk IS NOT NULL
- ORDER BY (itemPackingTypeFk = vOriginalItemPackingTypeFk) DESC;
+ DECLARE vItemPackingTypes CURSOR FOR
+ SELECT DISTINCT itemPackingTypeFk
+ FROM tSalesToMove;
DECLARE CONTINUE HANDLER FOR NOT FOUND SET vDone = TRUE;
- START TRANSACTION;
-
- SELECT id
- FROM sale
- WHERE ticketFk = vSelf
- AND NOT quantity
- FOR UPDATE;
-
- DELETE FROM sale
- WHERE NOT quantity
- AND ticketFk = vSelf;
-
- CREATE OR REPLACE TEMPORARY TABLE tSale
- (PRIMARY KEY (id))
- ENGINE = MEMORY
- SELECT s.id, i.itemPackingTypeFk, IFNULL(sv.litros, 0) litros
- FROM sale s
- JOIN item i ON i.id = s.itemFk
- LEFT JOIN saleVolume sv ON sv.saleFk = s.id
- WHERE s.ticketFk = vSelf;
-
- CREATE OR REPLACE TEMPORARY TABLE tSaleGroup
- ENGINE = MEMORY
- SELECT itemPackingTypeFk, SUM(litros) totalLitros
- FROM tSale
- GROUP BY itemPackingTypeFk;
-
- SELECT COUNT(*) INTO vPackingTypesToSplit
- FROM tSaleGroup
- WHERE itemPackingTypeFk IS NOT NULL;
+ SELECT COUNT(*) INTO vHasItemPackingType
+ FROM ticket t
+ JOIN sale s ON s.ticketFk = t.id
+ JOIN item i ON i.id = s.itemFk
+ WHERE t.id = vSelf
+ AND i.itemPackingTypeFk = vOriginalItemPackingTypeFk;
CREATE OR REPLACE TEMPORARY TABLE tmp.ticketIPT(
ticketFk INT,
itemPackingTypeFk VARCHAR(1)
- ) ENGINE = MEMORY;
+ ) ENGINE=MEMORY
+ SELECT vSelf ticketFk, vOriginalItemPackingTypeFk itemPackingTypeFk;
- CASE vPackingTypesToSplit
- WHEN 0 THEN
- INSERT INTO tmp.ticketIPT(ticketFk, itemPackingTypeFk)
- VALUES(vSelf, vItemPackingTypeFk);
- WHEN 1 THEN
- INSERT INTO tmp.ticketIPT(ticketFk, itemPackingTypeFk)
- SELECT vSelf, itemPackingTypeFk
- FROM tSaleGroup
- WHERE itemPackingTypeFk IS NOT NULL;
- ELSE
- OPEN vSaleGroup;
- FETCH vSaleGroup INTO vItemPackingTypeFk;
+ IF NOT vHasItemPackingType THEN
+ LEAVE proc;
+ END IF;
- INSERT INTO tmp.ticketIPT(ticketFk, itemPackingTypeFk)
- VALUES(vSelf, vItemPackingTypeFk);
+ CREATE OR REPLACE TEMPORARY TABLE tSalesToMove (
+ ticketFk INT,
+ saleFk INT,
+		itemPackingTypeFk VARCHAR(1)
+ ) ENGINE=MEMORY;
- l: LOOP
- SET vDone = FALSE;
- FETCH vSaleGroup INTO vItemPackingTypeFk;
+ INSERT INTO tSalesToMove (saleFk, itemPackingTypeFk)
+ SELECT s.id, i.itemPackingTypeFk
+ FROM ticket t
+ JOIN sale s ON s.ticketFk = t.id
+ JOIN item i ON i.id = s.itemFk
+ WHERE t.id = vSelf
+ AND i.itemPackingTypeFk <> vOriginalItemPackingTypeFk;
- IF vDone THEN
- LEAVE l;
- END IF;
+ OPEN vItemPackingTypes;
- CALL ticket_Clone(vSelf, vNewTicketFk);
+ l: LOOP
+ SET vDone = FALSE;
+ FETCH vItemPackingTypes INTO vItemPackingTypeFk;
- INSERT INTO tmp.ticketIPT(ticketFk, itemPackingTypeFk)
- VALUES(vNewTicketFk, vItemPackingTypeFk);
- END LOOP;
+ IF vDone THEN
+ LEAVE l;
+ END IF;
- CLOSE vSaleGroup;
+ CALL ticket_Clone(vSelf, vNewTicketFk);
- SELECT s.id
- FROM sale s
- JOIN tSale ts ON ts.id = s.id
- JOIN tmp.ticketIPT t ON t.itemPackingTypeFk = ts.itemPackingTypeFk
- FOR UPDATE;
+ UPDATE tSalesToMove
+ SET ticketFk = vNewTicketFk
+ WHERE itemPackingTypeFk = vItemPackingTypeFk;
- UPDATE sale s
- JOIN tSale ts ON ts.id = s.id
- JOIN tmp.ticketIPT t ON t.itemPackingTypeFk = ts.itemPackingTypeFk
- SET s.ticketFk = t.ticketFk;
+ END LOOP;
- SELECT itemPackingTypeFk INTO vItemPackingTypeFk
- FROM tSaleGroup sg
- WHERE sg.itemPackingTypeFk IS NOT NULL
- ORDER BY sg.itemPackingTypeFk
- LIMIT 1;
+ CLOSE vItemPackingTypes;
- UPDATE sale s
- JOIN tSale ts ON ts.id = s.id
- JOIN tmp.ticketIPT t ON t.itemPackingTypeFk = vItemPackingTypeFk
- SET s.ticketFk = t.ticketFk
- WHERE ts.itemPackingTypeFk IS NULL;
- END CASE;
+ UPDATE sale s
+ JOIN tSalesToMove stm ON stm.saleFk = s.id
+ SET s.ticketFk = stm.ticketFk
+ WHERE stm.ticketFk;
- COMMIT;
+ INSERT INTO tmp.ticketIPT (ticketFk, itemPackingTypeFk)
+ SELECT ticketFk, itemPackingTypeFk
+ FROM tSalesToMove
+ GROUP BY ticketFk;
- DROP TEMPORARY TABLE
- tSale,
- tSaleGroup;
+ DROP TEMPORARY TABLE tSalesToMove;
END$$
DELIMITER ;
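A minimal usage sketch of the refactored procedure (the ticket id and packing type below are hypothetical, not fixture values from this diff):

    CALL vn.ticket_splitItemPackingType(1, 'H');
    -- The original ticket keeps the proposed packing type; sales with any other
    -- packing type are moved to cloned tickets, one ticket per packing type.
    SELECT ticketFk, itemPackingTypeFk FROM tmp.ticketIPT;
    DROP TEMPORARY TABLE tmp.ticketIPT;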
diff --git a/db/routines/vn/triggers/host_beforeInsert.sql b/db/routines/vn/triggers/host_beforeInsert.sql
index c2cb82334..96b78bfb7 100644
--- a/db/routines/vn/triggers/host_beforeInsert.sql
+++ b/db/routines/vn/triggers/host_beforeInsert.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` TRIGGER `vn`.`host_beforeInsert`
+CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`host_beforeInsert`
BEFORE INSERT ON `host`
FOR EACH ROW
BEGIN
diff --git a/db/routines/vn/triggers/itemShelving_afterInsert.sql b/db/routines/vn/triggers/itemShelving_afterInsert.sql
new file mode 100644
index 000000000..92243ca03
--- /dev/null
+++ b/db/routines/vn/triggers/itemShelving_afterInsert.sql
@@ -0,0 +1,18 @@
+DELIMITER $$
+CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`itemShelving_afterInsert`
+ AFTER INSERT ON `itemShelving`
+ FOR EACH ROW
+BEGIN
+ INSERT INTO itemShelvingLog
+ SET itemShelvingFk = NEW.id,
+ workerFk = account.myUser_getId(),
+ accion = 'CREA REGISTRO',
+ itemFk = NEW.itemFk,
+ shelvingFk = NEW.shelvingFk,
+ visible = NEW.visible,
+ `grouping` = NEW.`grouping`,
+ packing = NEW.packing,
+ available = NEW.available;
+
+END$$
+DELIMITER ;
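A sketch of the audit behaviour this trigger introduces (item, shelving and quantities are hypothetical, assuming the remaining itemShelving columns have defaults and that its id is auto-generated):

    INSERT INTO vn.itemShelving (itemFk, shelvingFk, visible, `grouping`, packing, available)
        VALUES (1, 'AA1', 10, 5, 20, 10);
    -- The trigger should have written a matching 'CREA REGISTRO' row:
    SELECT accion, itemFk, shelvingFk, visible, available
        FROM vn.itemShelvingLog
        WHERE itemShelvingFk = LAST_INSERT_ID();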
diff --git a/db/routines/vn/triggers/roadmap_beforeInsert.sql b/db/routines/vn/triggers/roadmap_beforeInsert.sql
index df07d5540..2f9481140 100644
--- a/db/routines/vn/triggers/roadmap_beforeInsert.sql
+++ b/db/routines/vn/triggers/roadmap_beforeInsert.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` TRIGGER `vn`.`roadmap_beforeInsert`
+CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`roadmap_beforeInsert`
BEFORE INSERT ON `roadmap`
FOR EACH ROW
BEGIN
diff --git a/db/routines/vn/triggers/roadmap_beforeUpdate.sql b/db/routines/vn/triggers/roadmap_beforeUpdate.sql
index 4905a0442..a2a02e96a 100644
--- a/db/routines/vn/triggers/roadmap_beforeUpdate.sql
+++ b/db/routines/vn/triggers/roadmap_beforeUpdate.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` TRIGGER `vn`.`roadmap_beforeUpdate`
+CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`roadmap_beforeUpdate`
BEFORE UPDATE ON `roadmap`
FOR EACH ROW
BEGIN
diff --git a/db/routines/vn/triggers/saleGroupDetail._beforeInsert.sql b/db/routines/vn/triggers/saleGroupDetail._beforeInsert.sql
index 9513be46a..da975933c 100644
--- a/db/routines/vn/triggers/saleGroupDetail._beforeInsert.sql
+++ b/db/routines/vn/triggers/saleGroupDetail._beforeInsert.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` TRIGGER `vn`.`saleGroupDetail_beforeInsert`
+CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`saleGroupDetail_beforeInsert`
BEFORE INSERT ON `saleGroupDetail`
FOR EACH ROW
BEGIN
diff --git a/db/routines/vn/triggers/saleGroupDetail_afterDelete.sql b/db/routines/vn/triggers/saleGroupDetail_afterDelete.sql
index 1698ad8ce..37c3e9a2b 100644
--- a/db/routines/vn/triggers/saleGroupDetail_afterDelete.sql
+++ b/db/routines/vn/triggers/saleGroupDetail_afterDelete.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` TRIGGER `vn`.`saleGroupDetail_afterDelete`
+CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`saleGroupDetail_afterDelete`
AFTER DELETE ON `saleGroupDetail`
FOR EACH ROW
BEGIN
diff --git a/db/routines/vn/triggers/saleGroupDetail_beforeUpdate.sql b/db/routines/vn/triggers/saleGroupDetail_beforeUpdate.sql
index 0da18fd98..1f4238cdc 100644
--- a/db/routines/vn/triggers/saleGroupDetail_beforeUpdate.sql
+++ b/db/routines/vn/triggers/saleGroupDetail_beforeUpdate.sql
@@ -1,5 +1,5 @@
DELIMITER $$
-CREATE OR REPLACE DEFINER=`root`@`localhost` TRIGGER `vn`.`saleGroupDetail_beforeUpdate`
+CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`saleGroupDetail_beforeUpdate`
BEFORE UPDATE ON `saleGroupDetail`
FOR EACH ROW
BEGIN
diff --git a/db/routines/vn/triggers/ticketRefund_beforeInsert.sql b/db/routines/vn/triggers/ticketRefund_beforeInsert.sql
index 61d9fe7a2..dd1da6650 100644
--- a/db/routines/vn/triggers/ticketRefund_beforeInsert.sql
+++ b/db/routines/vn/triggers/ticketRefund_beforeInsert.sql
@@ -3,6 +3,8 @@ CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`ticketRefund_beforeInse
BEFORE INSERT ON `ticketRefund`
FOR EACH ROW
BEGIN
+ CALL ticketRefund_upsert(NEW.refundTicketFk, NEW.originalTicketFk);
+
SET NEW.editorFk = account.myUser_getId();
END$$
DELIMITER ;
diff --git a/db/routines/vn/triggers/ticketRefund_beforeUpdate.sql b/db/routines/vn/triggers/ticketRefund_beforeUpdate.sql
index 807695de6..f27e3f092 100644
--- a/db/routines/vn/triggers/ticketRefund_beforeUpdate.sql
+++ b/db/routines/vn/triggers/ticketRefund_beforeUpdate.sql
@@ -3,6 +3,8 @@ CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`ticketRefund_beforeUpda
BEFORE UPDATE ON `ticketRefund`
FOR EACH ROW
BEGIN
+ CALL ticketRefund_upsert(NEW.refundTicketFk, NEW.originalTicketFk);
+
SET NEW.editorFk = account.myUser_getId();
END$$
DELIMITER ;
diff --git a/db/routines/vn/triggers/travelThermograph_beforeInsert.sql b/db/routines/vn/triggers/travelThermograph_beforeInsert.sql
index f56109fba..256ee12a6 100644
--- a/db/routines/vn/triggers/travelThermograph_beforeInsert.sql
+++ b/db/routines/vn/triggers/travelThermograph_beforeInsert.sql
@@ -4,5 +4,14 @@ CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`travelThermograph_befor
FOR EACH ROW
BEGIN
SET NEW.editorFk = account.myUser_getId();
+
+ IF NEW.travelFk IS NULL AND
+ (SELECT COUNT(*) FROM travelThermograph
+ WHERE thermographFk = NEW.thermographFk
+ AND travelFk IS NULL
+ AND id <> NEW.id) > 0
+ THEN
+ CALL util.throw('Duplicate thermographFk without travelFk not allowed.');
+ END IF;
END$$
DELIMITER ;
diff --git a/db/routines/vn/triggers/travelThermograph_beforeUpdate.sql b/db/routines/vn/triggers/travelThermograph_beforeUpdate.sql
index 49f52f181..ffe81b38d 100644
--- a/db/routines/vn/triggers/travelThermograph_beforeUpdate.sql
+++ b/db/routines/vn/triggers/travelThermograph_beforeUpdate.sql
@@ -4,5 +4,14 @@ CREATE OR REPLACE DEFINER=`vn`@`localhost` TRIGGER `vn`.`travelThermograph_befor
FOR EACH ROW
BEGIN
SET NEW.editorFk = account.myUser_getId();
+
+ IF NEW.travelFk IS NULL AND
+ (SELECT COUNT(*) FROM travelThermograph
+ WHERE thermographFk = NEW.thermographFk
+ AND travelFk IS NULL
+ AND id <> NEW.id) > 0
+ THEN
+ CALL util.throw('Duplicate thermographFk without travelFk not allowed.');
+ END IF;
END$$
DELIMITER ;
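A sketch of the guard added to both thermograph triggers (the thermograph code and the column list are illustrative assumptions, not taken from the fixtures):

    INSERT INTO vn.travelThermograph (thermographFk, warehouseFk, temperatureFk)
        VALUES ('TMM-0001', 1, 'warm');
    -- A second unassigned row (travelFk IS NULL) for the same thermograph is now rejected with
    -- 'Duplicate thermographFk without travelFk not allowed.'
    INSERT INTO vn.travelThermograph (thermographFk, warehouseFk, temperatureFk)
        VALUES ('TMM-0001', 1, 'warm');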
diff --git a/db/routines/vn/views/buyer.sql b/db/routines/vn/views/buyer.sql
index e690dc16f..4f668d35d 100644
--- a/db/routines/vn/views/buyer.sql
+++ b/db/routines/vn/views/buyer.sql
@@ -2,12 +2,10 @@ CREATE OR REPLACE DEFINER=`vn`@`localhost`
SQL SECURITY DEFINER
VIEW `vn`.`buyer`
AS SELECT DISTINCT `u`.`id` AS `userFk`,
- `u`.`nickname` AS `nickname`,
- `ic`.`display` AS `display`
+ `u`.`nickname` AS `nickname`
FROM (
`account`.`user` `u`
JOIN `vn`.`itemType` `it` ON(`it`.`workerFk` = `u`.`id`)
- JOIN `vn`.`itemCategory` `ic` ON(`ic`.`id` = `it`.`categoryFk`)
)
WHERE `u`.`active` <> 0
ORDER BY `u`.`nickname`
diff --git a/db/routines/vn2008/views/Agencias.sql b/db/routines/vn2008/views/Agencias.sql
index d70ec73f4..1176d02c4 100644
--- a/db/routines/vn2008/views/Agencias.sql
+++ b/db/routines/vn2008/views/Agencias.sql
@@ -13,6 +13,5 @@ AS SELECT `am`.`id` AS `Id_Agencia`,
`am`.`reportMail` AS `send_mail`,
`am`.`isActive` AS `tpv`,
`am`.`code` AS `code`,
- `am`.`showAgencyName` AS `show_AgencyName`,
`am`.`isRiskFree` AS `isRiskFree`
FROM `vn`.`agencyMode` `am`
diff --git a/db/routines/vn2008/views/Cubos.sql b/db/routines/vn2008/views/Cubos.sql
index 4ece9c435..1b23af4fc 100644
--- a/db/routines/vn2008/views/Cubos.sql
+++ b/db/routines/vn2008/views/Cubos.sql
@@ -17,5 +17,6 @@ AS SELECT `p`.`id` AS `Id_Cubo`,
`p`.`upload` AS `Suben`,
`p`.`base` AS `Base`,
`p`.`isBox` AS `box`,
- `p`.`returnCost` AS `costeRetorno`
+ `p`.`returnCost` AS `costeRetorno`,
+ `p`.`isActive` AS `isActive`
FROM `vn`.`packaging` `p`
diff --git a/db/versions/11198-blackPhormium/00-firstScript.sql b/db/versions/11198-blackPhormium/00-firstScript.sql
new file mode 100644
index 000000000..6c181ed21
--- /dev/null
+++ b/db/versions/11198-blackPhormium/00-firstScript.sql
@@ -0,0 +1,7 @@
+UPDATE vn.itemShelving
+ SET isChecked = TRUE
+ WHERE isChecked;
+
+UPDATE vn.itemShelving
+ SET isChecked = FALSE
+ WHERE NOT isChecked;
diff --git a/db/versions/11261-bronzeDracena/00-firstScript.sql b/db/versions/11261-bronzeDracena/00-firstScript.sql
new file mode 100644
index 000000000..1ef944db2
--- /dev/null
+++ b/db/versions/11261-bronzeDracena/00-firstScript.sql
@@ -0,0 +1,2 @@
+ALTER TABLE vn.agencyMode
+ CHANGE IF EXISTS showAgencyName showAgencyName__ tinyint(1) DEFAULT 1 COMMENT '@deprecated 2024-09-24';
\ No newline at end of file
diff --git a/db/versions/11262-chocolateCamellia/00-firstScript.sql b/db/versions/11262-chocolateCamellia/00-firstScript.sql
new file mode 100644
index 000000000..79910fa76
--- /dev/null
+++ b/db/versions/11262-chocolateCamellia/00-firstScript.sql
@@ -0,0 +1,31 @@
+-- vn.priceDelta definition
+
+CREATE OR REPLACE TABLE vn.priceDelta (
+ `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
+ `itemTypeFk` smallint(5) unsigned NOT NULL,
+ `minSize` int(10) unsigned DEFAULT NULL COMMENT 'Minimum item.size',
+ `maxSize` int(10) unsigned DEFAULT NULL COMMENT 'Maximum item.size',
+ `inkFk` varchar(3) DEFAULT NULL,
+ `originFk` tinyint(2) unsigned DEFAULT NULL,
+ `producerFk` mediumint(3) unsigned DEFAULT NULL,
+ `fromDated` date DEFAULT NULL,
+ `toDated` date DEFAULT NULL,
+ `absIncreasing` decimal(10,3) DEFAULT NULL COMMENT 'Absolute increasing of final price',
+ `ratIncreasing` int(11) DEFAULT NULL COMMENT 'Increasing ratio for the cost price',
+ `warehouseFk` smallint(6) unsigned NOT NULL,
+ `created` timestamp NOT NULL DEFAULT current_timestamp(),
+ `editorFk` int(10) unsigned DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `priceDelta_itemType_FK` (`itemTypeFk`),
+ KEY `priceDelta_ink_FK` (`inkFk`),
+ KEY `priceDelta_producer_FK` (`producerFk`),
+ KEY `priceDelta_warehouse_FK` (`warehouseFk`),
+ KEY `priceDelta_worker_FK` (`editorFk`),
+ CONSTRAINT `priceDelta_ink_FK` FOREIGN KEY (`inkFk`) REFERENCES `ink` (`id`) ON UPDATE CASCADE,
+ CONSTRAINT `priceDelta_itemType_FK` FOREIGN KEY (`itemTypeFk`) REFERENCES `itemType` (`id`) ON UPDATE CASCADE,
+ CONSTRAINT `priceDelta_producer_FK` FOREIGN KEY (`producerFk`) REFERENCES `producer` (`id`) ON UPDATE CASCADE,
+ CONSTRAINT `priceDelta_warehouse_FK` FOREIGN KEY (`warehouseFk`) REFERENCES `warehouse` (`id`) ON UPDATE CASCADE,
+ CONSTRAINT `priceDelta_worker_FK` FOREIGN KEY (`editorFk`) REFERENCES `worker` (`id`) ON DELETE SET NULL ON UPDATE CASCADE
+) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci COMMENT='Defines the increase or decrease for ranges of items';
+
+GRANT INSERT, SELECT, UPDATE, DELETE ON TABLE vn.priceDelta TO buyer;
\ No newline at end of file
diff --git a/db/versions/11263-brownAnthurium/00-firstScript.sql b/db/versions/11263-brownAnthurium/00-firstScript.sql
new file mode 100644
index 000000000..0824ea5f7
--- /dev/null
+++ b/db/versions/11263-brownAnthurium/00-firstScript.sql
@@ -0,0 +1,32 @@
+-- Place your SQL code here
+-- vn.priceDelta definition
+
+CREATE OR REPLACE TABLE vn.priceDelta (
+ `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
+ `itemTypeFk` smallint(5) unsigned NOT NULL,
+ `minSize` int(10) unsigned DEFAULT NULL COMMENT 'Minimum item.size',
+ `maxSize` int(10) unsigned DEFAULT NULL COMMENT 'Maximum item.size',
+ `inkFk` varchar(3) DEFAULT NULL,
+ `originFk` tinyint(2) unsigned DEFAULT NULL,
+ `producerFk` mediumint(3) unsigned DEFAULT NULL,
+ `fromDated` date DEFAULT NULL,
+ `toDated` date DEFAULT NULL,
+ `absIncreasing` decimal(10,3) DEFAULT NULL COMMENT 'Absolute increasing of final price',
+ `ratIncreasing` int(11) DEFAULT NULL COMMENT 'Increasing ratio for the cost price',
+ `warehouseFk` smallint(6) unsigned NOT NULL,
+ `created` timestamp NOT NULL DEFAULT current_timestamp(),
+ `editorFk` int(10) unsigned DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ KEY `priceDelta_itemType_FK` (`itemTypeFk`),
+ KEY `priceDelta_ink_FK` (`inkFk`),
+ KEY `priceDelta_producer_FK` (`producerFk`),
+ KEY `priceDelta_warehouse_FK` (`warehouseFk`),
+ KEY `priceDelta_worker_FK` (`editorFk`),
+ CONSTRAINT `priceDelta_ink_FK` FOREIGN KEY (`inkFk`) REFERENCES `ink` (`id`) ON UPDATE CASCADE,
+ CONSTRAINT `priceDelta_itemType_FK` FOREIGN KEY (`itemTypeFk`) REFERENCES `itemType` (`id`) ON UPDATE CASCADE,
+ CONSTRAINT `priceDelta_producer_FK` FOREIGN KEY (`producerFk`) REFERENCES `producer` (`id`) ON UPDATE CASCADE,
+ CONSTRAINT `priceDelta_warehouse_FK` FOREIGN KEY (`warehouseFk`) REFERENCES `warehouse` (`id`) ON UPDATE CASCADE,
+ CONSTRAINT `priceDelta_worker_FK` FOREIGN KEY (`editorFk`) REFERENCES `worker` (`id`) ON DELETE SET NULL ON UPDATE CASCADE
+) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci COMMENT='Defines the increase or decrease for ranges of items';
+
+GRANT INSERT, SELECT, UPDATE, DELETE ON TABLE vn.priceDelta TO buyer;
\ No newline at end of file
diff --git a/db/versions/11271-blackMastic/00-firstScript.sql b/db/versions/11271-blackMastic/00-firstScript.sql
new file mode 100644
index 000000000..dcc8349a5
--- /dev/null
+++ b/db/versions/11271-blackMastic/00-firstScript.sql
@@ -0,0 +1,3 @@
+-- Place your SQL code here
+ALTER TABLE vn.priceDelta ADD IF NOT EXISTS zoneGeoFk int(11) NULL COMMENT 'Application area for the bonus component';
+ALTER TABLE vn.priceDelta ADD CONSTRAINT priceDelta_zoneGeo_FK FOREIGN KEY IF NOT EXISTS (zoneGeoFk) REFERENCES vn.zoneGeo(id) ON DELETE RESTRICT ON UPDATE CASCADE;
diff --git a/db/versions/11272-azureLilium/00-firstScript.sql b/db/versions/11272-azureLilium/00-firstScript.sql
new file mode 100644
index 000000000..0194ece18
--- /dev/null
+++ b/db/versions/11272-azureLilium/00-firstScript.sql
@@ -0,0 +1,4 @@
+-- Place your SQL code here
+RENAME TABLE vn.stockBuyed TO vn.stockBuyed__;
+ALTER TABLE vn.stockBuyed__
+COMMENT='@deprecated 2024-10-01 rename and refactor to stockBought';
diff --git a/db/versions/11274-redGerbera/00-firstScript copy 2.sql b/db/versions/11274-redGerbera/00-firstScript copy 2.sql
new file mode 100644
index 000000000..452accf2e
--- /dev/null
+++ b/db/versions/11274-redGerbera/00-firstScript copy 2.sql
@@ -0,0 +1 @@
+ALTER TABLE vn.address MODIFY COLUMN isEqualizated tinyint(1) DEFAULT FALSE NOT NULL;
diff --git a/db/versions/11274-redGerbera/00-firstScript copy 3.sql b/db/versions/11274-redGerbera/00-firstScript copy 3.sql
new file mode 100644
index 000000000..c1f574379
--- /dev/null
+++ b/db/versions/11274-redGerbera/00-firstScript copy 3.sql
@@ -0,0 +1 @@
+ALTER TABLE vn.autonomy MODIFY COLUMN isUeeMember tinyint(1) DEFAULT FALSE NOT NULL;
\ No newline at end of file
diff --git a/db/versions/11274-redGerbera/00-firstScript copy 4.sql b/db/versions/11274-redGerbera/00-firstScript copy 4.sql
new file mode 100644
index 000000000..18a4d3314
--- /dev/null
+++ b/db/versions/11274-redGerbera/00-firstScript copy 4.sql
@@ -0,0 +1 @@
+ALTER TABLE vn.chat MODIFY COLUMN checkUserStatus tinyint(1) DEFAULT FALSE NOT NULL;
diff --git a/db/versions/11274-redGerbera/00-firstScript copy 5.sql b/db/versions/11274-redGerbera/00-firstScript copy 5.sql
new file mode 100644
index 000000000..c75965735
--- /dev/null
+++ b/db/versions/11274-redGerbera/00-firstScript copy 5.sql
@@ -0,0 +1,3 @@
+ALTER TABLE vn.warehouse
+ MODIFY COLUMN isOrigin tinyint(1) DEFAULT FALSE NOT NULL,
+ MODIFY COLUMN isDestiny tinyint(1) DEFAULT FALSE NOT NULL;
\ No newline at end of file
diff --git a/db/versions/11274-redGerbera/00-firstScript copy 6.sql b/db/versions/11274-redGerbera/00-firstScript copy 6.sql
new file mode 100644
index 000000000..63b942e9d
--- /dev/null
+++ b/db/versions/11274-redGerbera/00-firstScript copy 6.sql
@@ -0,0 +1 @@
+ALTER TABLE vn.zoneIncluded MODIFY COLUMN isIncluded tinyint(1) DEFAULT FALSE NOT NULL;
diff --git a/db/versions/11274-redGerbera/00-firstScript copy.sql b/db/versions/11274-redGerbera/00-firstScript copy.sql
new file mode 100644
index 000000000..f14ff371d
--- /dev/null
+++ b/db/versions/11274-redGerbera/00-firstScript copy.sql
@@ -0,0 +1 @@
+ALTER TABLE bs.defaulter MODIFY COLUMN hasChanged tinyint(1) DEFAULT FALSE NOT NULL;
diff --git a/db/versions/11274-redGerbera/00-firstScript.sql b/db/versions/11274-redGerbera/00-firstScript.sql
new file mode 100644
index 000000000..8bcf7e027
--- /dev/null
+++ b/db/versions/11274-redGerbera/00-firstScript.sql
@@ -0,0 +1 @@
+ALTER TABLE account.user MODIFY COLUMN emailVerified tinyint(1) DEFAULT FALSE NOT NULL;
diff --git a/db/versions/11277-wheatChico/00-firstScript.sql b/db/versions/11277-wheatChico/00-firstScript.sql
new file mode 100644
index 000000000..c2b5963a4
--- /dev/null
+++ b/db/versions/11277-wheatChico/00-firstScript.sql
@@ -0,0 +1,3 @@
+-- Place your SQL code here
+ALTER TABLE hedera.`order` ADD IF NOT EXISTS rowUpdated DATETIME NULL
+ COMMENT 'Timestamp for last updated record in orderRow table';
diff --git a/db/versions/11278-crimsonEucalyptus/00-firstScript.sql b/db/versions/11278-crimsonEucalyptus/00-firstScript.sql
new file mode 100644
index 000000000..f69f75f1d
--- /dev/null
+++ b/db/versions/11278-crimsonEucalyptus/00-firstScript.sql
@@ -0,0 +1,6 @@
+-- Place your SQL code here
+
+ALTER TABLE vn.priceDelta ADD IF NOT EXISTS zoneGeoFk int(11) NULL;
+
+ALTER TABLE vn.priceDelta ADD CONSTRAINT priceDelta_zoneGeo_FK FOREIGN KEY IF NOT EXISTS (zoneGeoFk)
+REFERENCES vn.zoneGeo (`id`) ON DELETE RESTRICT ON UPDATE CASCADE;
diff --git a/db/versions/11279-turquoiseDendro/00-firstScript.sql b/db/versions/11279-turquoiseDendro/00-firstScript.sql
new file mode 100644
index 000000000..a241e6af2
--- /dev/null
+++ b/db/versions/11279-turquoiseDendro/00-firstScript.sql
@@ -0,0 +1,6 @@
+-- Place your SQL code here
+
+ALTER TABLE vn.priceDelta ADD IF NOT EXISTS zoneGeoFk int(11) NULL;
+
+ALTER TABLE vn.priceDelta ADD CONSTRAINT priceDelta_zoneGeo_FK FOREIGN KEY IF NOT EXISTS (zoneGeoFk)
+REFERENCES vn.zoneGeo (`id`) ON DELETE RESTRICT ON UPDATE CASCADE;
\ No newline at end of file
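Both statements use IF NOT EXISTS, so the column and the foreign key are created only once even though several version scripts repeat them; a sketch of a post-deployment check:

    SELECT COLUMN_NAME
        FROM information_schema.COLUMNS
        WHERE TABLE_SCHEMA = 'vn' AND TABLE_NAME = 'priceDelta' AND COLUMN_NAME = 'zoneGeoFk';
    SELECT CONSTRAINT_NAME
        FROM information_schema.TABLE_CONSTRAINTS
        WHERE TABLE_SCHEMA = 'vn' AND TABLE_NAME = 'priceDelta' AND CONSTRAINT_NAME = 'priceDelta_zoneGeo_FK';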
diff --git a/db/versions/11280-goldenCamellia/00-firstScript.sql b/db/versions/11280-goldenCamellia/00-firstScript.sql
new file mode 100644
index 000000000..fd55760c1
--- /dev/null
+++ b/db/versions/11280-goldenCamellia/00-firstScript.sql
@@ -0,0 +1,24 @@
+ALTER TABLE `vn`.`packaging`
+ ADD COLUMN IF NOT EXISTS `isActive` TINYINT(1) DEFAULT 1;
+
+UPDATE vn.packaging
+ SET isActive = FALSE
+ WHERE id IN('06x04x06','07x04x03','1000','100SM','1031','104','105','1060','10x04x06','10x04x07','1100','118','119','1200','129','1300',
+ '134','146','147','148','158','159','17x01x02','17X01X03','17x01x04','17x01x05','18X01X04','198','199',
+ '20P','20x01x03','246','273','278','279','280','290','359','37247','382','40P','453','463','464','465','466',
+ '467','469','471','473','494','508','509','511','512','514','515','516','518','519-50B','575','598-3x6','604','605','606',
+ '607','609','647','67515','676','680','682','685','687','688','691','692','693','694','695','730','751','7808','790','7910',
+ '7920','79450','7950','7952','7960','7976','7982','7986','7988',
+ '7993','8000','8046','8049','8053','8057','8058','8065','8076','8085','8086','8088',
+ '8091','8095','8096','8097','8101','8106','8108','8110','8112','8124','8134','8140','8141','8143','8145','8149','8150',
+ '8170','8174','8192','8200','8210','8249','8270','8275','8288','8300','8350','8375','8399','8400','8420','845','847','8480','8500',
+ '855','858','8600','862','869','871','872','8720','878','879','880','8800','882','885','910','911','912','914','916','917','918','919',
+ '920','921','922','923','924','925','926','927','930','9300','932','934','935','936','938','942','948','9600','980','984','9920',
+ 'B20x16','B43x13','Bande Rota','bb3','Bcesta','BcestaOVAL','BcestaRED','Bcirios','BciriosG','BjarronBLN','BjarronNGR',
+ 'Btazon','Bvelas','cactus200','Caja040','CajaTGLF','CC Alza Pl','CC_falso',
+ 'EB-RSMINA','EMB 1_4','EMB 2_5','espuma','FB-BENCH','granel','Grenex','guzma1200','guzma1400','guzma330','guzma400','guzma650','guzma900','HB-ALEX',
+ 'HB-APOSENT','HB-MAGIC','HB-NATUF','HB-RSMINA','HB-TES-RSR','HB068','HB117','HB2-CIRCA','JB-AROMA','jumboX3','kalan330','kalan400',
+ 'kalan577','kalan900','L12','L120','L14','L2-120','L200','L3-120','L4-120','L44','L6','L6-180','L8','L8-200','MB-BENCH','MBOLA','mc_11',
+ 'mc_13','Msp','NO VALIDO','NO-002','PANIC','PBLG','PISOCC/3','PISOCC/4','PISOCC/5','PISOCC/6',
+ 'procona','QB-CARDENA','QB-PANDERO','QB-TES-RSR','QB7-TOSCA','QB9-TOSCA','RB-BENCH','SemiEuroPa','spolette','t_flori11','T26x23',
+ 'T26x25','T27x24','T27x30','T28x26','T30x24','T33x30','THA50','ti_13','Tumbado','UB-BENCH')
\ No newline at end of file
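A quick sanity check after this script runs (a sketch; the two packaging ids are taken from the list above):

    SELECT id, isActive FROM vn.packaging WHERE id IN ('1000', 'procona'); -- expect isActive = 0
    -- The flag is also exposed through the legacy view updated in this changeset:
    SELECT Id_Cubo, isActive FROM vn2008.Cubos WHERE NOT isActive LIMIT 5;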
diff --git a/e2e/paths/01-salix/03_smartTable_searchBar_integrations.spec.js b/e2e/paths/01-salix/03_smartTable_searchBar_integrations.spec.js
deleted file mode 100644
index 9c37ce9ba..000000000
--- a/e2e/paths/01-salix/03_smartTable_searchBar_integrations.spec.js
+++ /dev/null
@@ -1,68 +0,0 @@
-import selectors from '../../helpers/selectors.js';
-import getBrowser from '../../helpers/puppeteer';
-
-describe('SmartTable SearchBar integration', () => {
- let browser;
- let page;
- beforeAll(async() => {
- browser = await getBrowser();
- page = browser.page;
- await page.loginAndModule('salesPerson', 'item');
- await page.waitToClick(selectors.globalItems.searchButton);
- });
-
- afterAll(async() => {
- await browser.close();
- });
-
- it('should search by type in searchBar, reload page and have same results', async() => {
- await page.waitToClick(selectors.itemsIndex.openAdvancedSearchButton);
- await page.autocompleteSearch(selectors.itemsIndex.advancedSearchItemType, 'Anthurium');
- await page.waitToClick(selectors.itemsIndex.advancedSearchButton);
- await page.waitForNumberOfElements(selectors.itemsIndex.searchResult, 4);
-
- await page.reload({
- waitUntil: 'networkidle2'
- });
-
- await page.waitForNumberOfElements(selectors.itemsIndex.searchResult, 4);
-
- await page.write(selectors.itemsIndex.advancedSmartTableGrouping, '1');
- await page.keyboard.press('Enter');
- await page.waitForNumberOfElements(selectors.itemsIndex.searchResult, 2);
-
- await page.reload({
- waitUntil: 'networkidle2'
- });
-
- await page.waitForNumberOfElements(selectors.itemsIndex.searchResult, 1);
- });
-
- it('should filter in section without smart-table and search in searchBar go to zone section', async() => {
- await page.loginAndModule('salesPerson', 'zone');
- await page.waitToClick(selectors.globalItems.searchButton);
-
- await page.doSearch('A');
- const firstCount = await page.countElement(selectors.zoneIndex.searchResult);
-
- await page.doSearch('A');
- const secondCount = await page.countElement(selectors.zoneIndex.searchResult);
-
- expect(firstCount).toEqual(7);
- expect(secondCount).toEqual(7);
- });
-
- it('should order orders by first id and order by last id, reload page and have same order', async() => {
- await page.loginAndModule('developer', 'item');
- await page.accessToSection('item.fixedPrice');
- await page.keyboard.press('Enter');
-
- await page.waitForTextInField(selectors.itemFixedPrice.firstItemID, '1');
-
- await page.waitToClick(selectors.itemFixedPrice.orderColumnId);
- await page.reload({
- waitUntil: 'networkidle2'
- });
- await page.waitForTextInField(selectors.itemFixedPrice.firstItemID, '3');
- });
-});
diff --git a/e2e/paths/10-travel/01_create.spec.js b/e2e/paths/10-travel/01_create.spec.js
deleted file mode 100644
index 98ade4852..000000000
--- a/e2e/paths/10-travel/01_create.spec.js
+++ /dev/null
@@ -1,42 +0,0 @@
-import selectors from '../../helpers/selectors.js';
-import getBrowser from '../../helpers/puppeteer';
-
-describe('Travel create path', () => {
- let browser;
- let page;
-
- beforeAll(async() => {
- browser = await getBrowser();
- page = browser.page;
- await page.loginAndModule('buyer', 'travel');
- });
-
- afterAll(async() => {
- await browser.close();
- });
-
- it('should create a new travel and check it was created with the correct data', async() => {
- const date = Date.vnNew();
- date.setDate(15);
- date.setUTCHours(0, 0, 0, 0);
-
- await page.waitToClick(selectors.travelIndex.newTravelButton);
- await page.waitForState('travel.create');
-
- const values = {
- reference: 'Testing reference',
- agencyMode: 'inhouse pickup',
- shipped: date,
- landed: date,
- warehouseOut: 'Warehouse One',
- warehouseIn: 'Warehouse Five'
- };
-
- const message = await page.sendForm('vn-travel-create form', values);
- await page.waitForState('travel.card.basicData');
- const formValues = await page.fetchForm('vn-travel-basic-data form', Object.keys(values));
-
- expect(message.isSuccess).toBeTrue();
- expect(formValues).toEqual(values);
- });
-});
diff --git a/e2e/paths/10-travel/02_basic_data_and_log.spec.js b/e2e/paths/10-travel/02_basic_data_and_log.spec.js
deleted file mode 100644
index 701e6b1b4..000000000
--- a/e2e/paths/10-travel/02_basic_data_and_log.spec.js
+++ /dev/null
@@ -1,97 +0,0 @@
-import selectors from '../../helpers/selectors.js';
-import getBrowser from '../../helpers/puppeteer';
-
-describe('Travel basic data path', () => {
- let browser;
- let page;
-
- beforeAll(async() => {
- browser = await getBrowser();
- page = browser.page;
- await page.loginAndModule('buyer', 'travel');
- await page.write(selectors.travelIndex.generalSearchFilter, '3');
- await page.keyboard.press('Enter');
- await page.accessToSection('travel.card.basicData');
- });
-
- afterAll(async() => {
- await browser.close();
- });
-
- it('should reach the thermograph section', async() => {
- await page.waitForState('travel.card.basicData');
- });
-
- it('should set a wrong delivery date then receive an error on submit', async() => {
- await page.loginAndModule('buyer', 'travel');
- await page.write(selectors.travelIndex.generalSearchFilter, '4');
- await page.keyboard.press('Enter');
- await page.accessToSection('travel.card.basicData');
- await page.waitForState('travel.card.basicData');
-
- const lastMonth = Date.vnNew();
- lastMonth.setMonth(lastMonth.getMonth() - 2);
-
- await page.pickDate(selectors.travelBasicData.deliveryDate, lastMonth);
- await page.waitToClick(selectors.travelBasicData.save);
- const message = await page.waitForSnackbar();
-
- expect(message.text).toContain('Landing cannot be lesser than shipment');
- });
-
- it('should undo the changes', async() => {
- await page.clearInput(selectors.travelBasicData.reference);
- await page.write(selectors.travelBasicData.reference, 'totally pointless ref');
- await page.waitToClick(selectors.travelBasicData.undoChanges);
- const result = await page.waitToGetProperty(selectors.travelBasicData.reference, 'value');
-
- expect(result).toEqual('fourth travel');
- });
-
- it('should now edit the whole form then save', async() => {
- await page.clearInput(selectors.travelBasicData.reference);
- await page.write(selectors.travelBasicData.reference, 'new reference!');
- await page.autocompleteSearch(selectors.travelBasicData.agency, 'Entanglement');
- await page.autocompleteSearch(selectors.travelBasicData.outputWarehouse, 'Warehouse Three');
- await page.autocompleteSearch(selectors.travelBasicData.inputWarehouse, 'Warehouse Four');
- await page.waitToClick(selectors.travelBasicData.delivered);
- await page.waitToClick(selectors.travelBasicData.received);
- await page.waitToClick(selectors.travelBasicData.save);
- const message = await page.waitForSnackbar();
-
- expect(message.text).toContain('Data saved!');
- });
-
- it('should reload the section and check the reference was saved', async() => {
- await page.reloadSection('travel.card.basicData');
- const result = await page.waitToGetProperty(selectors.travelBasicData.reference, 'value');
-
- expect(result).toEqual('new reference!');
- });
-
- it('should check the agency was saved', async() => {
- const result = await page.waitToGetProperty(selectors.travelBasicData.agency, 'value');
-
- expect(result).toEqual('Entanglement');
- });
-
- it('should check the output warehouse date was saved', async() => {
- const result = await page.waitToGetProperty(selectors.travelBasicData.outputWarehouse, 'value');
-
- expect(result).toEqual('Warehouse Three');
- });
-
- it('should check the input warehouse date was saved', async() => {
- const result = await page.waitToGetProperty(selectors.travelBasicData.inputWarehouse, 'value');
-
- expect(result).toEqual('Warehouse Four');
- });
-
- it(`should check the delivered checkbox was saved even tho it doesn't make sense`, async() => {
- await page.waitForClassPresent(selectors.travelBasicData.delivered, 'checked');
- });
-
- it(`should check the received checkbox was saved even tho it doesn't make sense`, async() => {
- await page.waitForClassPresent(selectors.travelBasicData.received, 'checked');
- });
-});
diff --git a/e2e/paths/10-travel/03_descriptor.spec.js b/e2e/paths/10-travel/03_descriptor.spec.js
deleted file mode 100644
index f066a74ca..000000000
--- a/e2e/paths/10-travel/03_descriptor.spec.js
+++ /dev/null
@@ -1,36 +0,0 @@
-import selectors from '../../helpers/selectors.js';
-import getBrowser from '../../helpers/puppeteer';
-
-describe('Travel descriptor path', () => {
- let browser;
- let page;
-
- beforeAll(async() => {
- browser = await getBrowser();
- page = browser.page;
- await page.loginAndModule('buyer', 'travel');
- await page.write(selectors.travelIndex.generalSearchFilter, '3');
- await page.keyboard.press('Enter');
- await page.waitForState('travel.card.summary');
- });
-
- afterAll(async() => {
- await browser.close();
- });
-
- it('should click the descriptor button to navigate to the travel index showing all travels with current agency', async() => {
- await page.waitToClick(selectors.travelDescriptor.filterByAgencyButton);
- await page.waitForState('travel.index');
- const result = await page.countElement(selectors.travelIndex.anySearchResult);
-
- expect(result).toBeGreaterThanOrEqual(1);
- });
-
- it('should navigate to the first search result', async() => {
- await page.waitToClick(selectors.travelIndex.firstSearchResult);
- await page.waitForState('travel.card.summary');
- const state = await page.getState();
-
- expect(state).toBe('travel.card.summary');
- });
-});
diff --git a/e2e/paths/10-travel/04_extra_community.spec.js b/e2e/paths/10-travel/04_extra_community.spec.js
deleted file mode 100644
index c5975c958..000000000
--- a/e2e/paths/10-travel/04_extra_community.spec.js
+++ /dev/null
@@ -1,42 +0,0 @@
-import selectors from '../../helpers/selectors.js';
-import getBrowser from '../../helpers/puppeteer';
-
-describe('Travel extra community path', () => {
- let browser;
- let page;
-
- beforeAll(async() => {
- browser = await getBrowser();
- page = browser.page;
- await page.loginAndModule('buyer', 'travel');
- await page.accessToSection('travel.extraCommunity');
- });
-
- afterAll(async() => {
- await browser.close();
- });
-
- it('should edit the travel reference and the locked kilograms', async() => {
- await page.waitToClick(selectors.travelExtraCommunity.removeContinentFilter);
- await page.waitForSpinnerLoad();
- await page.writeOnEditableTD(selectors.travelExtraCommunity.firstTravelReference, 'edited reference');
- await page.waitForSpinnerLoad();
- await page.writeOnEditableTD(selectors.travelExtraCommunity.firstTravelLockedKg, '1500');
-
- const message = await page.waitForSnackbar();
-
- expect(message.text).toContain('Data saved!');
- });
-
- it('should reload the index and confirm the reference and locked kg were edited', async() => {
- await page.accessToSection('travel.index');
- await page.accessToSection('travel.extraCommunity');
- await page.waitToClick(selectors.travelExtraCommunity.removeContinentFilter);
- await page.waitForTextInElement(selectors.travelExtraCommunity.firstTravelReference, 'edited reference');
- const reference = await page.getProperty(selectors.travelExtraCommunity.firstTravelReference, 'innerText');
- const lockedKg = await page.getProperty(selectors.travelExtraCommunity.firstTravelLockedKg, 'innerText');
-
- expect(reference).toContain('edited reference');
- expect(lockedKg).toContain(1500);
- });
-});
diff --git a/e2e/paths/10-travel/05_thermograph.spec.js b/e2e/paths/10-travel/05_thermograph.spec.js
deleted file mode 100644
index c9709f2f5..000000000
--- a/e2e/paths/10-travel/05_thermograph.spec.js
+++ /dev/null
@@ -1,64 +0,0 @@
-import selectors from '../../helpers/selectors.js';
-import getBrowser from '../../helpers/puppeteer';
-
-describe('Travel thermograph path', () => {
- const thermographName = '7H3-37H3RN4L-FL4M3';
- let browser;
- let page;
-
- beforeAll(async() => {
- browser = await getBrowser();
- page = browser.page;
- await page.loginAndModule('buyer', 'travel');
- await page.write(selectors.travelIndex.generalSearchFilter, '3');
- await page.keyboard.press('Enter');
- await page.accessToSection('travel.card.thermograph.index');
- });
-
- afterAll(async() => {
- await browser.close();
- });
-
- it('should reach the thermograph section', async() => {
- await page.waitForState('travel.card.thermograph.index');
- });
-
- it('should click the add thermograph floating button', async() => {
- await page.waitToClick(selectors.travelThermograph.add);
- await page.waitForState('travel.card.thermograph.create');
- });
-
- it('should click on the add thermograph icon of the thermograph autocomplete', async() => {
- await page.waitToClick(selectors.travelThermograph.addThermographIcon);
- await page.write(selectors.travelThermograph.newThermographId, thermographName);
- await page.autocompleteSearch(selectors.travelThermograph.newThermographModel, 'TEMPMATE');
- await page.autocompleteSearch(selectors.travelThermograph.newThermographWarehouse, 'Warehouse Two');
- await page.autocompleteSearch(selectors.travelThermograph.newThermographTemperature, 'Warm');
- await page.waitToClick(selectors.travelThermograph.createThermographButton);
- });
-
- it('should select the file to upload', async() => {
- let currentDir = process.cwd();
- let filePath = `${currentDir}/e2e/assets/thermograph.jpeg`;
-
- const [fileChooser] = await Promise.all([
- page.waitForFileChooser(),
- page.waitToClick(selectors.travelThermograph.uploadIcon)
- ]);
- await fileChooser.accept([filePath]);
-
- await page.waitToClick(selectors.travelThermograph.upload);
-
- const message = await page.waitForSnackbar();
- const state = await page.getState();
-
- expect(message.text).toContain('Data saved!');
- expect(state).toBe('travel.card.thermograph.index');
- });
-
- it('should check everything was saved correctly', async() => {
- const result = await page.waitToGetProperty(selectors.travelThermograph.createdThermograph, 'innerText');
-
- expect(result).toContain(thermographName);
- });
-});
diff --git a/e2e/paths/10-travel/06_search_panel.spec.js b/e2e/paths/10-travel/06_search_panel.spec.js
deleted file mode 100644
index 420ceaf48..000000000
--- a/e2e/paths/10-travel/06_search_panel.spec.js
+++ /dev/null
@@ -1,62 +0,0 @@
-import selectors from '../../helpers/selectors.js';
-import getBrowser from '../../helpers/puppeteer';
-
-describe('Travel search panel path', () => {
- let browser;
- let page;
- let httpRequest;
-
- beforeAll(async() => {
- browser = await getBrowser();
- page = browser.page;
- await page.loginAndModule('buyer', 'travel');
- page.on('request', req => {
- if (req.url().includes(`Travels/filter`))
- httpRequest = req.url();
- });
- });
-
- afterAll(async() => {
- await browser.close();
- });
-
- it('should filter using all the fields', async() => {
- await page.click(selectors.travelIndex.chip);
- await page.write(selectors.travelIndex.generalSearchFilter, 'travel');
- await page.keyboard.press('Enter');
-
- expect(httpRequest).toContain('search=travel');
-
- await page.click(selectors.travelIndex.chip);
- await page.autocompleteSearch(selectors.travelIndex.agencyFilter, 'Entanglement');
-
- expect(httpRequest).toContain('agencyModeFk');
-
- await page.click(selectors.travelIndex.chip);
- await page.autocompleteSearch(selectors.travelIndex.warehouseOutFilter, 'Warehouse One');
-
- expect(httpRequest).toContain('warehouseOutFk');
-
- await page.click(selectors.travelIndex.chip);
- await page.autocompleteSearch(selectors.travelIndex.warehouseInFilter, 'Warehouse Two');
-
- expect(httpRequest).toContain('warehouseInFk');
-
- await page.click(selectors.travelIndex.chip);
- await page.overwrite(selectors.travelIndex.scopeDaysFilter, '15');
- await page.keyboard.press('Enter');
-
- expect(httpRequest).toContain('scopeDays=15');
-
- await page.click(selectors.travelIndex.chip);
- await page.autocompleteSearch(selectors.travelIndex.continentFilter, 'Asia');
-
- expect(httpRequest).toContain('continent');
-
- await page.click(selectors.travelIndex.chip);
- await page.write(selectors.travelIndex.totalEntriesFilter, '1');
- await page.keyboard.press('Enter');
-
- expect(httpRequest).toContain('totalEntries=1');
- });
-});
diff --git a/e2e/paths/11-zone/01_basic-data.spec.js b/e2e/paths/11-zone/01_basic-data.spec.js
deleted file mode 100644
index 34d08c57f..000000000
--- a/e2e/paths/11-zone/01_basic-data.spec.js
+++ /dev/null
@@ -1,104 +0,0 @@
-import selectors from '../../helpers/selectors.js';
-import getBrowser from '../../helpers/puppeteer';
-
-describe('Zone basic data path', () => {
- let browser;
- let page;
-
- beforeAll(async() => {
- browser = await getBrowser();
- page = browser.page;
-
- await page.loginAndModule('deliveryAssistant',
- 'zone'); // turns up the zone module name and route aint the same lol
- await page.accessToSearchResult('10');
- await page.accessToSection('zone.card.basicData');
- });
-
- afterAll(async() => {
- await browser.close();
- });
-
- it('should reach the basic data section', async() => {
- await page.waitForState('zone.card.basicData');
- });
-
- it('should edit de form and then save', async() => {
- await page.clearInput(selectors.zoneBasicData.name);
- await page.write(selectors.zoneBasicData.name, 'Brimstone teleportation');
- await page.autocompleteSearch(selectors.zoneBasicData.agency, 'Quantum break device');
- await page.clearInput(selectors.zoneBasicData.maxVolume);
- await page.write(selectors.zoneBasicData.maxVolume, '10');
- await page.clearInput(selectors.zoneBasicData.travelingDays);
- await page.write(selectors.zoneBasicData.travelingDays, '1');
- await page.clearInput(selectors.zoneBasicData.closing);
- await page.pickTime(selectors.zoneBasicData.closing, '21:00');
- await page.clearInput(selectors.zoneBasicData.price);
- await page.write(selectors.zoneBasicData.price, '999');
- await page.clearInput(selectors.zoneBasicData.bonus);
- await page.write(selectors.zoneBasicData.bonus, '100');
- await page.clearInput(selectors.zoneBasicData.inflation);
- await page.write(selectors.zoneBasicData.inflation, '200');
- await page.waitToClick(selectors.zoneBasicData.volumetric);
- await page.waitToClick(selectors.zoneBasicData.saveButton);
- const message = await page.waitForSnackbar();
-
- expect(message.text).toContain('Data saved!');
- });
-
- it('should now reload the section', async() => {
- await page.reloadSection('zone.card.basicData');
- });
-
- it('should confirm the name was updated', async() => {
- const result = await page.waitToGetProperty(selectors.zoneBasicData.name, 'value');
-
- expect(result).toEqual('Brimstone teleportation');
- });
-
- it('should confirm the agency was updated', async() => {
- const result = await page.waitToGetProperty(selectors.zoneBasicData.agency, 'value');
-
- expect(result).toEqual('Quantum break device');
- });
-
- it('should confirm the max volume was updated', async() => {
- const result = await page.waitToGetProperty(selectors.zoneBasicData.maxVolume, 'value');
-
- expect(result).toEqual('10');
- });
-
- it('should confirm the traveling days were updated', async() => {
- const result = await page.waitToGetProperty(selectors.zoneBasicData.travelingDays, 'value');
-
- expect(result).toEqual('1');
- });
-
- it('should confirm the closing hour was updated', async() => {
- const result = await page.waitToGetProperty(selectors.zoneBasicData.closing, 'value');
-
- expect(result).toEqual('21:00');
- });
-
- it('should confirm the price was updated', async() => {
- const result = await page.waitToGetProperty(selectors.zoneBasicData.price, 'value');
-
- expect(result).toEqual('999');
- });
-
- it('should confirm the bonus was updated', async() => {
- const result = await page.waitToGetProperty(selectors.zoneBasicData.bonus, 'value');
-
- expect(result).toEqual('100');
- });
-
- it('should confirm the inflation was updated', async() => {
- const result = await page.waitToGetProperty(selectors.zoneBasicData.inflation, 'value');
-
- expect(result).toEqual('200');
- });
-
- it('should confirm the volumetric checkbox was checked', async() => {
- await page.waitForClassPresent(selectors.zoneBasicData.volumetric, 'checked');
- });
-});
diff --git a/e2e/paths/11-zone/02_descriptor.spec.js b/e2e/paths/11-zone/02_descriptor.spec.js
deleted file mode 100644
index baccc910f..000000000
--- a/e2e/paths/11-zone/02_descriptor.spec.js
+++ /dev/null
@@ -1,32 +0,0 @@
-import selectors from '../../helpers/selectors.js';
-import getBrowser from '../../helpers/puppeteer';
-
-describe('Zone descriptor path', () => {
- let browser;
- let page;
-
- beforeAll(async() => {
- browser = await getBrowser();
- page = browser.page;
- await page.loginAndModule('deliveryAssistant', 'zone');
- await page.accessToSearchResult('13');
- });
-
- afterAll(async() => {
- await browser.close();
- });
-
- it('should eliminate the zone using the descriptor option', async() => {
- await page.waitToClick(selectors.zoneDescriptor.menu);
- await page.waitToClick(selectors.zoneDescriptor.deleteZone);
- await page.respondToDialog('accept');
- await page.waitForState('zone.index');
- });
-
- it('should search for the deleted zone to find no results', async() => {
- await page.doSearch('13');
- const count = await page.countElement(selectors.zoneIndex.searchResult);
-
- expect(count).toEqual(0);
- });
-});
diff --git a/front/core/services/app.js b/front/core/services/app.js
index cec7bea7f..b8fcc43e1 100644
--- a/front/core/services/app.js
+++ b/front/core/services/app.js
@@ -57,7 +57,7 @@ export default class App {
getUrl(route, appName = 'lilium') {
const index = window.location.hash.indexOf(route.toLowerCase());
- const newRoute = index < 0 ? route : window.location.hash.substring(index);
+ let newRoute = index < 0 ? route : window.location.hash.substring(index);
const env = process.env.NODE_ENV;
const filter = {
where: {and: [
@@ -66,6 +66,11 @@ export default class App {
]}
};
+
+ if (this.logger.$params.q)
+ newRoute = newRoute.concat(`?table=${this.logger.$params.q}`);
+
return this.logger.$http.get('Urls/findOne', {filter})
.then(res => {
if (res && res.data)
diff --git a/loopback/locale/en.json b/loopback/locale/en.json
index 352e08826..ea84cb6eb 100644
--- a/loopback/locale/en.json
+++ b/loopback/locale/en.json
@@ -235,10 +235,10 @@
"Cannot add holidays on this day": "Cannot add holidays on this day",
"Cannot send mail": "Cannot send mail",
"CONSTRAINT `chkParkingCodeFormat` failed for `vn`.`parking`": "CONSTRAINT `chkParkingCodeFormat` failed for `vn`.`parking`",
+ "This postcode already exists": "This postcode already exists",
"Original invoice not found": "Original invoice not found",
"There is already a tray with the same height": "There is already a tray with the same height",
"The height must be greater than 50cm": "The height must be greater than 50cm",
"The maximum height of the wagon is 200cm": "The maximum height of the wagon is 200cm",
- "This postcode already exists": "This postcode already exists",
- "This buyer has already made a reservation for this date": "This buyer has already made a reservation for this date"
-}
\ No newline at end of file
+ "The quantity claimed cannot be greater than the quantity of the line": "The quantity claimed cannot be greater than the quantity of the line"
+}
diff --git a/loopback/locale/es.json b/loopback/locale/es.json
index ba4b90cb5..796c945e8 100644
--- a/loopback/locale/es.json
+++ b/loopback/locale/es.json
@@ -366,9 +366,11 @@
"The invoices have been created but the PDFs could not be generated": "Se ha facturado pero no se ha podido generar el PDF",
"It has been invoiced but the PDF of refund not be generated": "Se ha facturado pero no se ha podido generar el PDF del abono",
"Payment method is required": "El método de pago es obligatorio",
- "Cannot send mail": "No se ha podido enviar el correo",
+    "Cannot send mail": "No se ha podido enviar el correo",
"CONSTRAINT `supplierAccountTooShort` failed for `vn`.`supplier`": "La cuenta debe tener exactamente 10 dígitos",
"The sale not exists in the item shelving": "La venta no existe en la estantería del artículo",
+ "The entry not have stickers": "La entrada no tiene etiquetas",
+ "Too many records": "Demasiados registros",
"Original invoice not found": "Factura original no encontrada",
"The entry has no lines or does not exist": "La entrada no tiene lineas o no existe",
"Weight already set": "El peso ya está establecido",
@@ -377,7 +379,7 @@
"The height must be greater than 50cm": "La altura debe ser superior a 50cm",
"The maximum height of the wagon is 200cm": "La altura máxima es 200cm",
"The entry does not have stickers": "La entrada no tiene etiquetas",
- "Too many records": "Demasiados registros",
"This buyer has already made a reservation for this date": "Este comprador ya ha hecho una reserva para esta fecha",
- "No valid travel thermograph found": "No se encontró un termógrafo válido"
+ "No valid travel thermograph found": "No se encontró un termógrafo válido",
+ "The quantity claimed cannot be greater than the quantity of the line": "La cantidad reclamada no puede ser mayor que la cantidad de la línea"
}
diff --git a/loopback/locale/fr.json b/loopback/locale/fr.json
index 601fe35f5..a6648b186 100644
--- a/loopback/locale/fr.json
+++ b/loopback/locale/fr.json
@@ -361,6 +361,6 @@
"The invoices have been created but the PDFs could not be generated": "La facture a été émise mais le PDF n'a pas pu être généré",
"It has been invoiced but the PDF of refund not be generated": "Il a été facturé mais le PDF de remboursement n'a pas été généré",
"Cannot send mail": "Impossible d'envoyer le mail",
- "Original invoice not found": "Facture originale introuvable"
-
+ "Original invoice not found": "Facture originale introuvable",
+ "The quantity claimed cannot be greater than the quantity of the line": "Le montant réclamé ne peut pas être supérieur au montant de la ligne"
}
diff --git a/loopback/locale/pt.json b/loopback/locale/pt.json
index a6a65710f..a43f0e780 100644
--- a/loopback/locale/pt.json
+++ b/loopback/locale/pt.json
@@ -361,5 +361,6 @@
"The invoices have been created but the PDFs could not be generated": "Foi faturado, mas o PDF não pôde ser gerado",
"It has been invoiced but the PDF of refund not be generated": "Foi faturado mas não foi gerado o PDF do reembolso",
"Original invoice not found": "Fatura original não encontrada",
- "Cannot send mail": "Não é possível enviar o email"
+ "Cannot send mail": "Não é possível enviar o email",
+ "The quantity claimed cannot be greater than the quantity of the line": "O valor reclamado não pode ser superior ao valor da linha"
}
diff --git a/modules/account/back/models/mail-alias-account.js b/modules/account/back/models/mail-alias-account.js
index 0eee6a123..61ca344e9 100644
--- a/modules/account/back/models/mail-alias-account.js
+++ b/modules/account/back/models/mail-alias-account.js
@@ -1,6 +1,5 @@
const ForbiddenError = require('vn-loopback/util/forbiddenError');
-const UserError = require('vn-loopback/util/user-error');
module.exports = Self => {
Self.rewriteDbError(function(err) {
diff --git a/modules/claim/back/methods/claim-beginning/specs/claim-beginning.spec.js b/modules/claim/back/methods/claim-beginning/specs/claim-beginning.spec.js
new file mode 100644
index 000000000..b7974ad23
--- /dev/null
+++ b/modules/claim/back/methods/claim-beginning/specs/claim-beginning.spec.js
@@ -0,0 +1,55 @@
+const models = require('vn-loopback/server/server').models;
+const LoopBackContext = require('loopback-context');
+
+describe('ClaimBeginning model()', () => {
+ const claimFk = 1;
+ const activeCtx = {
+ accessToken: {userId: 18},
+ headers: {origin: 'localhost:5000'},
+ __: () => {}
+ };
+
+ beforeEach(() => {
+ spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
+ active: activeCtx
+ });
+ });
+
+ it('should change quantity claimed', async() => {
+ const tx = await models.ClaimBeginning.beginTransaction({});
+
+ let error;
+ try {
+ const options = {transaction: tx};
+ const claim = await models.ClaimBeginning.findOne({where: {claimFk}}, options);
+ const sale = await models.Sale.findById(claim.saleFk, {}, options);
+ await claim.updateAttribute('quantity', sale.quantity - 1, options);
+
+ await tx.rollback();
+ } catch (e) {
+ error = e;
+ await tx.rollback();
+ }
+
+ expect(error).toBeUndefined();
+ });
+
+ it('should throw error when quantity claimed is greater than quantity of the sale', async() => {
+ const tx = await models.ClaimBeginning.beginTransaction({});
+
+ let error;
+ try {
+ const options = {transaction: tx};
+ const claim = await models.ClaimBeginning.findOne({where: {claimFk}}, options);
+ const sale = await models.Sale.findById(claim.saleFk, {}, options);
+ await claim.updateAttribute('quantity', sale.quantity + 1, options);
+
+ await tx.rollback();
+ } catch (e) {
+ error = e;
+ await tx.rollback();
+ }
+
+ expect(error.toString()).toContain('The quantity claimed cannot be greater than the quantity of the line');
+ });
+});
diff --git a/modules/claim/back/models/claim-beginning.js b/modules/claim/back/models/claim-beginning.js
index d269b2285..3dc9261c3 100644
--- a/modules/claim/back/models/claim-beginning.js
+++ b/modules/claim/back/models/claim-beginning.js
@@ -10,16 +10,21 @@ module.exports = Self => {
});
Self.observe('before save', async ctx => {
+ const options = ctx.options;
+ const models = Self.app.models;
+ const saleFk = ctx?.currentInstance?.saleFk || ctx?.instance?.saleFk;
+ const sale = await models.Sale.findById(saleFk, {fields: ['ticketFk', 'quantity']}, options);
+
if (ctx.isNewInstance) {
- const models = Self.app.models;
- const options = ctx.options;
- const instance = ctx.instance;
- const ticket = await models.Sale.findById(instance.saleFk, {fields: ['ticketFk']}, options);
- const claim = await models.Claim.findById(instance.claimFk, {fields: ['ticketFk']}, options);
- if (ticket.ticketFk != claim.ticketFk)
+ const claim = await models.Claim.findById(ctx.instance.claimFk, {fields: ['ticketFk']}, options);
+ if (sale.ticketFk != claim.ticketFk)
throw new UserError(`Cannot create a new claimBeginning from a different ticket`);
}
+
await claimIsEditable(ctx);
+
+ if (sale?.quantity && ctx.data?.quantity && ctx.data.quantity > sale?.quantity)
+ throw new UserError('The quantity claimed cannot be greater than the quantity of the line');
});
Self.observe('before delete', async ctx => {
diff --git a/modules/entry/back/methods/entry/filter.js b/modules/entry/back/methods/entry/filter.js
index 776544bc6..f4703245c 100644
--- a/modules/entry/back/methods/entry/filter.js
+++ b/modules/entry/back/methods/entry/filter.js
@@ -106,10 +106,15 @@ module.exports = Self => {
description: `The to shipped date filter`
},
{
- arg: 'days',
+ arg: 'daysOnward',
type: 'number',
description: `N days interval`
},
+ {
+ arg: 'daysAgo',
+ type: 'number',
+ description: `N days ago interval`
+ },
{
arg: 'invoiceAmount',
type: 'number',
@@ -216,16 +221,29 @@ module.exports = Self => {
JOIN vn.currency cu ON cu.id = e.currencyFk`
);
- if (ctx.args.days) {
- stmt.merge({
- sql: `
- AND t.shipped <= util.VN_CURDATE() + INTERVAL ? DAY
- AND t.shipped >= util.VN_CURDATE()
- `,
- params: [ctx.args.days]
- });
+ stmt.merge(conn.makeWhere(filter.where));
+
+ const {daysAgo, daysOnward} = ctx.args;
+ if (daysOnward || daysAgo) {
+ const params = [];
+ let today = 'util.VN_CURDATE()';
+ let from = today;
+ let to = today;
+
+ if (daysAgo) {
+ from += ' - INTERVAL ? DAY';
+ params.push(daysAgo);
+ }
+ if (daysOnward) {
+ to += ' + INTERVAL ? DAY';
+ params.push(daysOnward);
+ }
+
+ const whereDays = (filter.where ? 'AND' : 'WHERE') + ` t.shipped BETWEEN ${from} AND ${to}`;
+ stmt.merge({sql: whereDays, params});
}
- stmt.merge(conn.makeSuffix(filter));
+
+ stmt.merge(conn.makePagination(filter));
const itemsIndex = stmts.push(stmt) - 1;
const sql = ParameterizedSQL.join(stmts, ';');
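For reference, a sketch of the shipped-date condition the new branch appends when both daysAgo and daysOnward are passed (31 and 6 are illustrative values; the clause starts with WHERE instead of AND when no other filter is present, and the query shape assumes the entry–travel join used by this filter):

    SELECT e.id
        FROM vn.entry e
            JOIN vn.travel t ON t.id = e.travelFk
        WHERE t.shipped BETWEEN util.VN_CURDATE() - INTERVAL 31 DAY
                            AND util.VN_CURDATE() + INTERVAL 6 DAY;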
diff --git a/modules/entry/back/methods/entry/print.js b/modules/entry/back/methods/entry/print.js
index 11abf0788..5b9de9a69 100644
--- a/modules/entry/back/methods/entry/print.js
+++ b/modules/entry/back/methods/entry/print.js
@@ -52,7 +52,7 @@ module.exports = Self => {
await merger.add(new Uint8Array(pdfBuffer[0]));
}
- if (!merger._doc) throw new UserError('The entry does not have stickers');
+ if (!merger._doc) throw new UserError('The entry not have stickers');
await Self.rawSql(`
UPDATE buy
diff --git a/modules/entry/back/methods/entry/specs/filter.spec.js b/modules/entry/back/methods/entry/specs/filter.spec.js
index 145da170a..105838858 100644
--- a/modules/entry/back/methods/entry/specs/filter.spec.js
+++ b/modules/entry/back/methods/entry/specs/filter.spec.js
@@ -39,7 +39,7 @@ describe('Entry filter()', () => {
const result = await models.Entry.filter(ctx, options);
- expect(result.length).toEqual(12);
+ expect(result.length).toEqual(11);
await tx.rollback();
} catch (e) {
@@ -49,13 +49,13 @@ describe('Entry filter()', () => {
});
describe('should return the entry matching the supplier', () => {
- it('when userId is supplier ', async() => {
+ it('when userId is supplier and searching days onward', async() => {
const tx = await models.Entry.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
- args: {days: 6},
+ args: {daysOnward: 6},
req: {accessToken: {userId: 1102}}
};
@@ -70,6 +70,27 @@ describe('Entry filter()', () => {
}
});
+ it('when userId is supplier and searching days ago', async() => {
+ const tx = await models.Entry.beginTransaction({});
+ const options = {transaction: tx};
+
+ try {
+ const ctx = {
+ args: {daysAgo: 31},
+ req: {accessToken: {userId: 1102}}
+ };
+
+ const result = await models.Entry.filter(ctx, options);
+
+ expect(result.length).toEqual(6);
+
+ await tx.rollback();
+ } catch (e) {
+ await tx.rollback();
+ throw e;
+ }
+ });
+
it('when userId is supplier fetching other supplier', async() => {
const tx = await models.Entry.beginTransaction({});
const options = {transaction: tx};
@@ -131,7 +152,7 @@ describe('Entry filter()', () => {
const result = await models.Entry.filter(ctx, options);
- expect(result.length).toEqual(11);
+ expect(result.length).toEqual(10);
await tx.rollback();
} catch (e) {
diff --git a/modules/entry/back/model-config.json b/modules/entry/back/model-config.json
index 85f5e8285..5c45b6e07 100644
--- a/modules/entry/back/model-config.json
+++ b/modules/entry/back/model-config.json
@@ -29,4 +29,4 @@
"StockBought": {
"dataSource": "vn"
}
-}
\ No newline at end of file
+}
diff --git a/modules/invoiceOut/back/methods/invoiceOut/createManualInvoice.js b/modules/invoiceOut/back/methods/invoiceOut/createManualInvoice.js
index c46da0ba5..a06128848 100644
--- a/modules/invoiceOut/back/methods/invoiceOut/createManualInvoice.js
+++ b/modules/invoiceOut/back/methods/invoiceOut/createManualInvoice.js
@@ -10,6 +10,11 @@ module.exports = Self => {
type: 'any',
description: 'The invoiceable client id'
},
+ {
+ arg: 'addressFk',
+ type: 'any',
+ description: 'The address id'
+ },
{
arg: 'ticketFk',
type: 'any',
@@ -23,7 +28,8 @@ module.exports = Self => {
{
arg: 'serial',
type: 'string',
- description: 'The invoice serial'
+ description: 'The invoice serial',
+ required: true
},
{
arg: 'taxArea',
@@ -46,108 +52,126 @@ module.exports = Self => {
}
});
- Self.createManualInvoice = async(ctx, clientFk, ticketFk, maxShipped, serial, taxArea, reference, options) => {
- if (!clientFk && !ticketFk) throw new UserError(`Select ticket or client`);
- const models = Self.app.models;
- const myOptions = {userId: ctx.req.accessToken.userId};
- let tx;
+ Self.createManualInvoice =
+ async(ctx, clientFk, addressFk, ticketFk, maxShipped, serial, taxArea, reference, options) => {
+ if (!clientFk && !ticketFk) throw new UserError(`Select ticket or client`);
+ const models = Self.app.models;
+ const myOptions = {userId: ctx.req.accessToken.userId};
+ let tx;
- if (typeof options == 'object')
- Object.assign(myOptions, options);
+ if (typeof options == 'object')
+ Object.assign(myOptions, options);
- if (!myOptions.transaction) {
- tx = await Self.beginTransaction({});
- myOptions.transaction = tx;
- }
+ if (!myOptions.transaction) {
+ tx = await Self.beginTransaction({});
+ myOptions.transaction = tx;
+ }
- let companyId;
- let newInvoice;
- let query;
- try {
- if (ticketFk) {
- const ticket = await models.Ticket.findById(ticketFk, null, myOptions);
- const company = await models.Company.findById(ticket.companyFk, null, myOptions);
+ let companyFk;
+ let newInvoice;
+ let query;
+ try {
+ if (ticketFk) {
+ const ticket = await models.Ticket.findById(ticketFk, {
+ fields: ['clientFk', 'companyFk', 'shipped', 'refFk', 'totalWithVat']
+ }, myOptions);
+ const company = await models.Company.findById(ticket.companyFk, {
+ fields: ['code']
+ }, myOptions);
- clientFk = ticket.clientFk;
- maxShipped = ticket.shipped;
- companyId = ticket.companyFk;
+ clientFk = ticket.clientFk;
+ maxShipped = ticket.shipped;
+ companyFk = ticket.companyFk;
- // Validates invoiced ticket
- if (ticket.refFk)
- throw new UserError('This ticket is already invoiced');
+ if (ticket.refFk)
+ throw new UserError('This ticket is already invoiced');
- // Validates ticket amount
- if (ticket.totalWithVat == 0)
- throw new UserError(`A ticket with an amount of zero can't be invoiced`);
+ if (ticket.totalWithVat == 0)
+ throw new UserError(`A ticket with an amount of zero can't be invoiced`);
- // Validates ticket nagative base
- const hasNegativeBase = await getNegativeBase(maxShipped, clientFk, companyId, myOptions);
- if (hasNegativeBase && company.code == 'VNL')
- throw new UserError(`A ticket with a negative base can't be invoiced`);
- } else {
- if (!maxShipped)
- throw new UserError(`Max shipped required`);
+ const hasNegativeBase = await getNegativeBase(maxShipped, clientFk, companyFk, myOptions);
+ if (hasNegativeBase && company.code == 'VNL')
+ throw new UserError(`A ticket with a negative base can't be invoiced`);
+ } else {
+ if (!maxShipped)
+ throw new UserError(`Max shipped required`);
- const company = await models.Ticket.findOne({
- fields: ['companyFk'],
- where: {
- clientFk: clientFk,
- shipped: {lte: maxShipped}
+ if (addressFk) {
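+ // If a client was also given, the address must belong to that client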
+ const address = await models.Address.findById(addressFk, {
+ fields: ['clientFk']
+ }, myOptions);
+
+ if (clientFk && clientFk !== address.clientFk)
+ throw new UserError('The provided clientFk does not match');
}
- }, myOptions);
- companyId = company.companyFk;
+ const company = await models.Ticket.findOne({
+ fields: ['companyFk'],
+ where: {
+ clientFk: clientFk,
+ shipped: {lte: maxShipped}
+ }
+ }, myOptions);
+ companyFk = company.companyFk;
+ }
+
+ const isClientInvoiceable = await isInvoiceable(clientFk, myOptions);
+ if (!isClientInvoiceable)
+ throw new UserError(`This client is not invoiceable`);
+
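+ // Disallow invoicing future shipments or issuing before the serial's latest invoice date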
+ const tomorrow = Date.vnNew();
+ tomorrow.setDate(tomorrow.getDate() + 1);
+
+ if (maxShipped >= tomorrow)
+ throw new UserError(`Can't invoice to future`);
+
+ const maxInvoiceDate = await getMaxIssued(serial, companyFk, myOptions);
+ if (Date.vnNew() < maxInvoiceDate)
+ throw new UserError(`Can't invoice to past`);
+
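+ // Dispatch to the matching stored procedure: by ticket, by address or by client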
+ if (ticketFk) {
+ query = `CALL invoiceOut_newFromTicket(?, ?, ?, ?, @newInvoiceId)`;
+ await Self.rawSql(query, [
+ ticketFk,
+ serial,
+ taxArea,
+ reference
+ ], myOptions);
+ } else if (addressFk) {
+ query = `CALL invoiceOut_newFromAddress(?, ?, ?, ?, ?, ?, @newInvoiceId)`;
+ await Self.rawSql(query, [
+ addressFk,
+ serial,
+ maxShipped,
+ companyFk,
+ taxArea,
+ reference
+ ], myOptions);
+ } else {
+ query = `CALL invoiceOut_newFromClient(?, ?, ?, ?, ?, ?, @newInvoiceId)`;
+ await Self.rawSql(query, [
+ clientFk,
+ serial,
+ maxShipped,
+ companyFk,
+ taxArea,
+ reference
+ ], myOptions);
+ }
+
+ [newInvoice] = await Self.rawSql(`SELECT @newInvoiceId id`, null, myOptions);
+
+ if (tx) await tx.commit();
+ } catch (e) {
+ if (tx) await tx.rollback();
+ throw e;
}
- // Validate invoiceable client
- const isClientInvoiceable = await isInvoiceable(clientFk, myOptions);
- if (!isClientInvoiceable)
- throw new UserError(`This client is not invoiceable`);
+ if (!newInvoice.id) throw new UserError('It was not able to create the invoice');
- // Can't invoice tickets into future
- const tomorrow = Date.vnNew();
- tomorrow.setDate(tomorrow.getDate() + 1);
+ await Self.createPdf(ctx, newInvoice.id);
- if (maxShipped >= tomorrow)
- throw new UserError(`Can't invoice to future`);
-
- const maxInvoiceDate = await getMaxIssued(serial, companyId, myOptions);
- if (Date.vnNew() < maxInvoiceDate)
- throw new UserError(`Can't invoice to past`);
-
- if (ticketFk) {
- query = `CALL invoiceOut_newFromTicket(?, ?, ?, ?, @newInvoiceId)`;
- await Self.rawSql(query, [
- ticketFk,
- serial,
- taxArea,
- reference
- ], myOptions);
- } else {
- query = `CALL invoiceOut_newFromClient(?, ?, ?, ?, ?, ?, @newInvoiceId)`;
- await Self.rawSql(query, [
- clientFk,
- serial,
- maxShipped,
- companyId,
- taxArea,
- reference
- ], myOptions);
- }
-
- [newInvoice] = await Self.rawSql(`SELECT @newInvoiceId id`, null, myOptions);
-
- if (tx) await tx.commit();
- } catch (e) {
- if (tx) await tx.rollback();
- throw e;
- }
-
- if (!newInvoice.id) throw new UserError('It was not able to create the invoice');
-
- await Self.createPdf(ctx, newInvoice.id);
-
- return newInvoice;
- };
+ return newInvoice;
+ };
async function isInvoiceable(clientFk, options) {
const models = Self.app.models;
@@ -159,10 +183,10 @@ module.exports = Self => {
return result.invoiceable;
}
- async function getNegativeBase(maxShipped, clientFk, companyId, options) {
+ async function getNegativeBase(maxShipped, clientFk, companyFk, options) {
const models = Self.app.models;
await models.InvoiceOut.rawSql('CALL invoiceOut_exportationFromClient(?,?,?)',
- [maxShipped, clientFk, companyId], options
+ [maxShipped, clientFk, companyFk], options
);
const query = 'SELECT vn.hasAnyNegativeBase() AS base';
const [result] = await models.InvoiceOut.rawSql(query, [], options);
@@ -170,14 +194,14 @@ module.exports = Self => {
return result.base;
}
- async function getMaxIssued(serial, companyId, options) {
+ async function getMaxIssued(serial, companyFk, options) {
const models = Self.app.models;
const query = `SELECT MAX(issued) AS issued
FROM invoiceOut
WHERE serial = ? AND companyFk = ?`;
const [maxIssued] = await models.InvoiceOut.rawSql(query,
- [serial, companyId], options);
- const maxInvoiceDate = maxIssued && maxIssued.issued || Date.vnNew();
+ [serial, companyFk], options);
+ const maxInvoiceDate = maxIssued?.issued || Date.vnNew();
return maxInvoiceDate;
}
diff --git a/modules/invoiceOut/back/methods/invoiceOut/invoiceClient.js b/modules/invoiceOut/back/methods/invoiceOut/invoiceClient.js
index bf7e7d3cb..2fad1afd8 100644
--- a/modules/invoiceOut/back/methods/invoiceOut/invoiceClient.js
+++ b/modules/invoiceOut/back/methods/invoiceOut/invoiceClient.js
@@ -64,7 +64,7 @@ module.exports = Self => {
try {
const client = await models.Client.findById(args.clientId, {
fields: ['id', 'hasToInvoiceByAddress']
- }, options);
+ }, myOptions);
if (client.hasToInvoiceByAddress) {
await Self.rawSql('CALL ticketToInvoiceByAddress(?, ?, ?, ?)', [
@@ -72,13 +72,13 @@ module.exports = Self => {
args.maxShipped,
args.addressId,
args.companyFk
- ], options);
+ ], myOptions);
} else {
await Self.rawSql('CALL invoiceFromClient(?, ?, ?)', [
args.maxShipped,
client.id,
args.companyFk
- ], options);
+ ], myOptions);
}
const invoiceId = await models.Ticket.makeInvoice(
@@ -87,7 +87,7 @@ module.exports = Self => {
args.companyFk,
args.invoiceDate,
null,
- options
+ myOptions
);
if (tx) await tx.commit();
diff --git a/modules/invoiceOut/back/methods/invoiceOut/specs/createManualInvoice.spec.js b/modules/invoiceOut/back/methods/invoiceOut/specs/createManualInvoice.spec.js
index 55739e570..58c18b730 100644
--- a/modules/invoiceOut/back/methods/invoiceOut/specs/createManualInvoice.spec.js
+++ b/modules/invoiceOut/back/methods/invoiceOut/specs/createManualInvoice.spec.js
@@ -6,110 +6,90 @@ describe('InvoiceOut createManualInvoice()', () => {
const clientId = 1106;
const activeCtx = {accessToken: {userId: 1}};
const ctx = {req: activeCtx};
+ let tx; let options;
+
+ beforeEach(async() => {
+ spyOn(models.InvoiceOut, 'createPdf').and.returnValue(Promise.resolve(true));
+ spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
+ active: activeCtx
+ });
+
+ tx = await models.InvoiceOut.beginTransaction({});
+ options = {transaction: tx};
+ });
+
+ afterEach(async() => {
+ await tx.rollback();
+ });
it('should throw an error trying to invoice again', async() => {
- spyOn(models.InvoiceOut, 'createPdf').and.returnValue(new Promise(resolve => resolve(true)));
-
- const tx = await models.InvoiceOut.beginTransaction({});
- const options = {transaction: tx};
-
let error;
try {
- await createInvoice(ctx, options, undefined, ticketId);
- await createInvoice(ctx, options, undefined, ticketId);
-
- await tx.rollback();
+ await createInvoice(ctx, options, undefined, undefined, ticketId);
+ await createInvoice(ctx, options, undefined, undefined, ticketId);
} catch (e) {
error = e;
- await tx.rollback();
}
expect(error.message).toContain('This ticket is already invoiced');
});
it('should throw an error for a ticket with an amount of zero', async() => {
- spyOn(models.InvoiceOut, 'createPdf').and.returnValue(new Promise(resolve => resolve(true)));
- spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
- active: activeCtx
- });
-
- const tx = await models.InvoiceOut.beginTransaction({});
- const options = {transaction: tx};
-
let error;
try {
const ticket = await models.Ticket.findById(ticketId, null, options);
await ticket.updateAttributes({totalWithVat: 0}, options);
- await createInvoice(ctx, options, undefined, ticketId);
- await tx.rollback();
+ await createInvoice(ctx, options, undefined, undefined, ticketId);
} catch (e) {
error = e;
- await tx.rollback();
}
expect(error.message).toContain(`A ticket with an amount of zero can't be invoiced`);
});
it('should throw an error when the clientFk property is set without the max shipped date', async() => {
- spyOn(models.InvoiceOut, 'createPdf').and.returnValue(new Promise(resolve => resolve(true)));
-
- const tx = await models.InvoiceOut.beginTransaction({});
- const options = {transaction: tx};
-
let error;
try {
await createInvoice(ctx, options, clientId);
- await tx.rollback();
} catch (e) {
error = e;
- await tx.rollback();
}
expect(error.message).toContain(`Max shipped required`);
});
it('should throw an error for a non-invoiceable client', async() => {
- spyOn(models.InvoiceOut, 'createPdf').and.returnValue(new Promise(resolve => resolve(true)));
- spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
- active: activeCtx
- });
-
- const tx = await models.InvoiceOut.beginTransaction({});
- const options = {transaction: tx};
-
let error;
try {
const client = await models.Client.findById(clientId, null, options);
await client.updateAttributes({isTaxDataChecked: false}, options);
- await createInvoice(ctx, options, undefined, ticketId);
-
- await tx.rollback();
+ await createInvoice(ctx, options, undefined, undefined, ticketId);
} catch (e) {
error = e;
- await tx.rollback();
}
expect(error.message).toContain(`This client is not invoiceable`);
});
- it('should create a manual invoice', async() => {
- spyOn(models.InvoiceOut, 'createPdf').and.returnValue(new Promise(resolve => resolve(true)));
+ it('should create a manual invoice with ticket', async() => {
+ const result = await createInvoice(ctx, options, undefined, undefined, ticketId);
- const tx = await models.InvoiceOut.beginTransaction({});
- const options = {transaction: tx};
+ expect(result.id).toEqual(jasmine.any(Number));
+ });
- try {
- const result = await createInvoice(ctx, options, undefined, ticketId);
+ it('should create a manual invoice with client', async() => {
+ const result = await createInvoice(ctx, options, clientId, undefined, undefined, Date.vnNew());
- expect(result.id).toEqual(jasmine.any(Number));
+ expect(result.id).toEqual(jasmine.any(Number));
+ });
- await tx.rollback();
- } catch (e) {
- await tx.rollback();
- throw e;
- }
+ it('should create a manual invoice with address', async() => {
+ const addressFk = 126;
+ const result = await createInvoice(ctx, options, clientId, addressFk, undefined, Date.vnNew());
+
+ expect(result.id).toEqual(jasmine.any(Number));
});
});
@@ -117,6 +97,7 @@ function createInvoice(
ctx,
options,
clientFk = undefined,
+ addressFk = undefined,
ticketFk = undefined,
maxShipped = undefined,
serial = 'T',
@@ -124,6 +105,6 @@ function createInvoice(
reference = undefined
) {
return models.InvoiceOut.createManualInvoice(
- ctx, clientFk, ticketFk, maxShipped, serial, taxArea, reference, options
+ ctx, clientFk, addressFk, ticketFk, maxShipped, serial, taxArea, reference, options
);
}
diff --git a/modules/item/back/methods/item-shelving/updateFromSale.js b/modules/item/back/methods/item-shelving/updateFromSale.js
index 167509074..47ca2a010 100644
--- a/modules/item/back/methods/item-shelving/updateFromSale.js
+++ b/modules/item/back/methods/item-shelving/updateFromSale.js
@@ -38,9 +38,13 @@ module.exports = Self => {
const itemShelving = itemShelvingSale.itemShelving();
const quantity = itemShelving.visible + itemShelvingSale.quantity;
+ const available = itemShelving.available + itemShelvingSale.quantity;
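+ // Keep available in sync with visible when restoring the sale quantity to the shelving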
await itemShelving.updateAttributes(
- {visible: quantity},
+ {
+ visible: quantity,
+ available: available
+ },
myOptions
);
if (tx) await tx.commit();
diff --git a/modules/item/back/methods/item/specs/lastEntriesFilter.spec.js b/modules/item/back/methods/item/specs/lastEntriesFilter.spec.js
index 2fd30c2ca..00488e534 100644
--- a/modules/item/back/methods/item/specs/lastEntriesFilter.spec.js
+++ b/modules/item/back/methods/item/specs/lastEntriesFilter.spec.js
@@ -1,6 +1,6 @@
const {models} = require('vn-loopback/server/server');
describe('item lastEntriesFilter()', () => {
- it('should return two entry for the given item', async() => {
+ it('should return one entry for the given item', async() => {
const minDate = Date.vnNew();
minDate.setHours(0, 0, 0, 0);
const maxDate = Date.vnNew();
@@ -13,7 +13,7 @@ describe('item lastEntriesFilter()', () => {
const filter = {where: {itemFk: 1, landed: {between: [minDate, maxDate]}}};
const result = await models.Item.lastEntriesFilter(filter, options);
- expect(result.length).toEqual(2);
+ expect(result.length).toEqual(1);
await tx.rollback();
} catch (e) {
@@ -22,7 +22,7 @@ describe('item lastEntriesFilter()', () => {
}
});
- it('should return six entries for the given item', async() => {
+ it('should return five entries for the given item', async() => {
const minDate = Date.vnNew();
minDate.setHours(0, 0, 0, 0);
minDate.setMonth(minDate.getMonth() - 2, 1);
@@ -37,7 +37,7 @@ describe('item lastEntriesFilter()', () => {
const filter = {where: {itemFk: 1, landed: {between: [minDate, maxDate]}}};
const result = await models.Item.lastEntriesFilter(filter, options);
- expect(result.length).toEqual(6);
+ expect(result.length).toEqual(5);
await tx.rollback();
} catch (e) {
diff --git a/modules/ticket/back/methods/sale-tracking/setPicked.js b/modules/ticket/back/methods/sale-tracking/setPicked.js
index ed3656cf4..b63a0474f 100644
--- a/modules/ticket/back/methods/sale-tracking/setPicked.js
+++ b/modules/ticket/back/methods/sale-tracking/setPicked.js
@@ -75,7 +75,11 @@ module.exports = Self => {
const itemShelving = await models.ItemShelving.findById(itemShelvingFk, null, myOptions);
- await itemShelving.updateAttributes({visible: itemShelving.visible - quantity}, myOptions);
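+ // Picking a sale now discounts available as well as visible from the shelving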
+ await itemShelving.updateAttributes(
+ {
+ visible: itemShelving.visible - quantity,
+ available: itemShelving.available - quantity
+ }, myOptions);
await Self.updateAll(
{saleFk},
diff --git a/modules/ticket/back/methods/sale/clone.js b/modules/ticket/back/methods/sale/clone.js
index 24346f3ba..0b658a69e 100644
--- a/modules/ticket/back/methods/sale/clone.js
+++ b/modules/ticket/back/methods/sale/clone.js
@@ -142,12 +142,19 @@ module.exports = Self => {
ctx.args.addressId = ticket.addressFk;
const newTicket = await models.Ticket.new(ctx, myOptions);
-
- await models.TicketRefund.create({
- originalTicketFk: ticketId,
- refundTicketFk: newTicket.id
- }, myOptions);
-
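+ // Only create the refund relation when it does not already exist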
+ const existingRefund = await models.TicketRefund.findOne({
+ where: {
+ originalTicketFk: ticketId,
+ refundTicketFk: newTicket.id
+ }
+ }, myOptions);
+ if (!existingRefund) {
+ await models.TicketRefund.create({
+ originalTicketFk: ticketId,
+ refundTicketFk: newTicket.id
+ }, myOptions);
+ }
return newTicket;
}
};
diff --git a/modules/travel/back/methods/travel/saveThermograph.js b/modules/travel/back/methods/travel/saveThermograph.js
index d246d8149..6f7e1c8bf 100644
--- a/modules/travel/back/methods/travel/saveThermograph.js
+++ b/modules/travel/back/methods/travel/saveThermograph.js
@@ -117,7 +117,8 @@ module.exports = Self => {
result: state,
maxTemperature,
minTemperature,
- temperatureFk
+ temperatureFk,
+ warehouseFk: warehouseId
}, myOptions);
if (tx) await tx.commit();
diff --git a/modules/travel/back/methods/travel/specs/saveThermograph.spec.js b/modules/travel/back/methods/travel/specs/saveThermograph.spec.js
index c7d848c08..c2da4234e 100644
--- a/modules/travel/back/methods/travel/specs/saveThermograph.spec.js
+++ b/modules/travel/back/methods/travel/specs/saveThermograph.spec.js
@@ -4,7 +4,7 @@ describe('Thermograph saveThermograph()', () => {
const ctx = beforeAll.getCtx();
const travelFk = 1;
const thermographId = '138350-0';
- const warehouseFk = '1';
+ const warehouseFk = 1;
const state = 'COMPLETED';
const maxTemperature = 30;
const minTemperature = 10;
@@ -41,7 +41,7 @@ describe('Thermograph saveThermograph()', () => {
maxTemperature,
minTemperature,
temperatureFk,
- null,
+ warehouseFk,
null,
null,
null,
diff --git a/modules/travel/front/basic-data/index.html b/modules/travel/front/basic-data/index.html
deleted file mode 100644
index 783208d9a..000000000
--- a/modules/travel/front/basic-data/index.html
+++ /dev/null
@@ -1,92 +0,0 @@
- <!-- 92-line AngularJS template removed. Recoverable residue: a travel summary table with the
- columns Id, Supplier, Agency, Amount, Reference, Packages, Bl. KG, Phy. KG, Vol. KG, Wh. Out,
- W. Shipped, Wh. In and W. Landed, rendering each travel (id, cargoSupplierNickname,
- agencyModeName, stickers, loadedKg, volumeKg, warehouseOutName, shipped, warehouseInName,
- landed) followed by its entries (id, supplierName, invoiceAmount, reference, stickers,
- loadedkg, volumeKg). -->