Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6276-createNewWarehouse
commit 01340408c6
@@ -1,16 +1,11 @@
 const {ParameterizedSQL} = require('loopback-connector');
-const {buildFilter, mergeFilters} = require('vn-loopback/util/filter');
-// const {models} = require('vn-loopback/server/server');
+const {buildFilter} = require('vn-loopback/util/filter');

 module.exports = Self => {
     Self.remoteMethod('filter', {
         description:
             'Find all postcodes of the model matched by postcode, town, province or country.',
         accessType: 'READ',
-        returns: {
-            type: ['object'],
-            root: true,
-        },
         accepts: [
             {
                 arg: 'filter',
@@ -25,6 +20,10 @@ module.exports = Self => {
                 http: {source: 'query'}
             },
         ],
+        returns: {
+            type: ['object'],
+            root: true,
+        },
         http: {
             path: `/filter`,
             verb: 'GET',
@@ -32,30 +31,34 @@ module.exports = Self => {
     });

     Self.filter = async(ctx, filter, options) => {
         const myOptions = {};

         if (typeof options == 'object')
             Object.assign(myOptions, options);

+        filter = ctx?.filter ?? {};
+
         const conn = Self.dataSource.connector;
-        const where = buildFilter(ctx.args, (param, value) => {
+        const where = buildFilter(filter?.where, (param, value) => {
             switch (param) {
                 case 'search':
-                    return {or: [
-                        {'pc.code': {like: `%${value}%`}},
-                        {'t.name': {like: `%${value}%`}},
-                        {'p.name': {like: `%${value}%`}},
-                        {'c.country': {like: `%${value}%`}}
-                    ]
+                    return {
+                        or: [
+                            {'pc.code': {like: `%${value}%`}},
+                            {'t.name': {like: `%${value}%`}},
+                            {'p.name': {like: `%${value}%`}},
+                            {'c.country': {like: `%${value}%`}}
+                        ]
                     };
             }
         }) ?? {};
-        filter = mergeFilters(ctx.args?.filter ?? {}, {where});
+        delete ctx.filter.where;

         const stmts = [];
         let stmt;
         stmt = new ParameterizedSQL(`
             SELECT
+                pc.townFk,
+                t.provinceFk,
+                p.countryFk,
                 pc.code,
                 t.name as town,
                 p.name as province,
@@ -67,7 +70,7 @@ module.exports = Self => {
                 JOIN country c on c.id = p.countryFk
         `);

-        stmt.merge(conn.makeSuffix(filter));
+        stmt.merge(conn.makeSuffix({where, ...ctx}));
         const itemsIndex = stmts.push(stmt) - 1;

         const sql = ParameterizedSQL.join(stmts, ';');
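For reference: after this change the Postcode `filter()` remote method reads its criteria from `ctx.filter` rather than `ctx.args`, with the free-text `search` term nested under `where`. A minimal caller sketch, reusing the fixture values from the updated spec further down; anything not shown in this commit is an assumption:

```js
// Sketch only: exercise the reworked Postcode.filter() directly.
// `search` is matched against pc.code, t.name, p.name and c.country.
const {models} = require('vn-loopback/server/server');

async function findPostcodes() {
    const ctx = {
        filter: {
            where: {search: 46} // postcode, town, province or country
        },
        limit: 1
    };
    // Signature is (ctx, filter, options); the filter argument is
    // overwritten from ctx.filter inside the method.
    const results = await models.Postcode.filter(ctx);
    return results; // e.g. [{code, town, province, townFk, provinceFk, countryFk, ...}]
}
```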

@@ -7,13 +7,13 @@ describe('Postcode filter()', () => {

         try {
             const ctx = {
-                args: {
+                filter: {

                 },
+                limit: 1
             };
             const results = await models.Postcode.filter(ctx, options);

-            expect(results.length).toBeGreaterThan(0);
+            expect(results.length).toEqual(1);
             await tx.rollback();
         } catch (e) {
             await tx.rollback();
@@ -27,8 +27,10 @@ describe('Postcode filter()', () => {

         try {
             const ctx = {
-                args: {
-                    search: 46,
+                filter: {
+                    where: {
+                        search: 46,
+                    }
                 },
             };
             const results = await models.Postcode.filter(ctx, options);
@@ -47,8 +49,10 @@ describe('Postcode filter()', () => {

         try {
             const ctx = {
-                args: {
-                    search: 'Alz',
+                filter: {
+                    where: {
+                        search: 'Alz',
+                    }
                 },
             };
             const results = await models.Postcode.filter(ctx, options);
@@ -67,8 +71,10 @@ describe('Postcode filter()', () => {

         try {
             const ctx = {
-                args: {
-                    search: 'one',
+                filter: {
+                    where: {
+                        search: 'one',
+                    }
                 },
             };
             const results = await models.Postcode.filter(ctx, options);
@@ -87,8 +93,10 @@ describe('Postcode filter()', () => {

         try {
             const ctx = {
-                args: {
-                    search: 'Ec',
+                filter: {
+                    where: {
+                        search: 'Ec',
+                    }
                 },
             };
             const results = await models.Postcode.filter(ctx, options);

@@ -0,0 +1,3 @@
+GRANT EXECUTE ON PROCEDURE util.tx_commit TO guest;
+GRANT EXECUTE ON PROCEDURE util.tx_rollback TO guest;
+GRANT EXECUTE ON PROCEDURE util.tx_start TO guest;

@@ -0,0 +1,85 @@
+DROP PROCEDURE IF EXISTS vn.travel_cloneWithEntries;
+
+DELIMITER $$
+$$
+CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`travel_cloneWithEntries`(
+    IN vTravelFk INT,
+    IN vDateStart DATE,
+    IN vDateEnd DATE,
+    IN vWarehouseOutFk INT,
+    IN vWarehouseInFk INT,
+    IN vRef VARCHAR(255),
+    IN vAgencyModeFk INT,
+    OUT vNewTravelFk INT)
+BEGIN
+/**
+ * Clones a travel together with its entries and buys.
+ * @param vTravelFk Template travel to clone
+ * @param vDateStart Shipment date of the new travel
+ * @param vDateEnd Landing date of the new travel
+ * @param vWarehouseOutFk Departure warehouse of the new travel
+ * @param vWarehouseInFk Landing warehouse of the new travel
+ * @param vRef Reference of the new travel
+ * @param vAgencyModeFk Agency mode of the new travel
+ * @param vNewTravelFk Id of the new travel
+ */
+    DECLARE vNewEntryFk INT;
+    DECLARE vEvaNotes VARCHAR(255);
+    DECLARE vDone BOOL;
+    DECLARE vAuxEntryFk INT;
+    DECLARE vTx BOOLEAN DEFAULT !@@in_transaction;
+    DECLARE vRsEntry CURSOR FOR
+        SELECT e.id
+            FROM entry e
+                JOIN travel t ON t.id = e.travelFk
+            WHERE e.travelFk = vTravelFk;
+
+    DECLARE CONTINUE HANDLER FOR NOT FOUND SET vDone = TRUE;
+
+    DECLARE EXIT HANDLER FOR SQLEXCEPTION
+    BEGIN
+        CALL util.tx_rollback(vTx);
+        RESIGNAL;
+    END;
+
+    CALL util.tx_start(vTx);
+
+    INSERT INTO travel (shipped, landed, warehouseInFk, warehouseOutFk, agencyModeFk, `ref`, isDelivered, isReceived, m3, cargoSupplierFk, kg, clonedFrom)
+        SELECT vDateStart, vDateEnd, vWarehouseInFk, vWarehouseOutFk, vAgencyModeFk, vRef, isDelivered, isReceived, m3, cargoSupplierFk, kg, vTravelFk
+            FROM travel
+            WHERE id = vTravelFk;
+
+    SET vNewTravelFk = LAST_INSERT_ID();
+
+    SET vDone = FALSE;
+    SET @isModeInventory = TRUE;
+
+    OPEN vRsEntry;
+
+    l: LOOP
+        SET vDone = FALSE;
+        FETCH vRsEntry INTO vAuxEntryFk;
+
+        IF vDone THEN
+            LEAVE l;
+        END IF;
+
+        CALL entry_cloneHeader(vAuxEntryFk, vNewEntryFk, vNewTravelFk);
+        CALL entry_copyBuys(vAuxEntryFk, vNewEntryFk);
+
+        SELECT evaNotes INTO vEvaNotes
+            FROM entry
+            WHERE id = vAuxEntryFk;
+
+        UPDATE entry
+            SET evaNotes = vEvaNotes
+            WHERE id = vNewEntryFk;
+
+    END LOOP;
+
+    SET @isModeInventory = FALSE;
+    CLOSE vRsEntry;
+
+    CALL util.tx_commit(vTx);
+END$$
+DELIMITER ;

@@ -0,0 +1,15 @@
+DELIMITER $$
+$$
+
+CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `util`.`tx_commit`(IN tx BOOL)
+BEGIN
+/**
+ * Commits the changes associated with a transaction.
+ *
+ * @param tx BOOL True if there is an associated transaction
+ */
+    IF tx THEN
+        COMMIT;
+    END IF;
+END$$
+DELIMITER ;

@@ -0,0 +1,15 @@
+DELIMITER $$
+$$
+
+CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `util`.`tx_rollback`(tx BOOL)
+BEGIN
+/**
+ * Rolls back the changes associated with a transaction.
+ *
+ * @param tx BOOL True if there is an associated transaction
+ */
+    IF tx THEN
+        ROLLBACK;
+    END IF;
+END$$
+DELIMITER ;

@@ -0,0 +1,17 @@
+
+
+DELIMITER $$
+$$
+
+CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `util`.`tx_start`(tx BOOL)
+BEGIN
+/**
+ * Starts a transaction.
+ *
+ * @param tx BOOL True if there is an associated transaction
+ */
+    IF tx THEN
+        START TRANSACTION;
+    END IF;
+END$$
+DELIMITER ;
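Taken together, these helpers let a procedure own its transaction only when the caller has not opened one already: `travel_cloneWithEntries` initialises `vTx` to `!@@in_transaction`, so inside an outer transaction `util.tx_start`/`tx_commit`/`tx_rollback` become no-ops and commit or rollback stays with the Node layer. A minimal sketch of that calling side, built only from calls that appear elsewhere in this commit (`beginTransaction`, `ParameterizedSQL`, `executeStmt`); the wrapper function itself is illustrative:

```js
// Sketch only: call the procedure inside a transaction owned by Node, so the
// procedure's own tx_start()/tx_commit() do nothing and the clone can still
// be committed or rolled back from JavaScript.
const ParameterizedSQL = require('loopback-connector').ParameterizedSQL;

async function cloneInOuterTx(Self, travelId, args) {
    const conn = Self.dataSource.connector;
    const tx = await Self.beginTransaction({});
    const myOptions = {transaction: tx};
    try {
        const stmts = [];
        stmts.push(new ParameterizedSQL(
            `CALL vn.travel_cloneWithEntries(?, ?, ?, ?, ?, ?, ?, @vTravelFk)`,
            [travelId, ...args]));
        const newTravelIndex = stmts.push('SELECT @vTravelFk AS id') - 1;

        const result = await conn.executeStmt(ParameterizedSQL.join(stmts, ';'), myOptions);
        const [lastInsert] = result[newTravelIndex];

        await tx.commit(); // or tx.rollback() to discard the clone entirely
        return lastInsert.id;
    } catch (e) {
        await tx.rollback();
        throw e;
    }
}
```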

@@ -29,7 +29,6 @@ module.exports = Self => {

         const loopBackContext = LoopBackContext.getCurrentContext();
         ctx.req = loopBackContext.active;
-        if (await models.ACL.checkAccessAcl(ctx, 'Sale', 'canForceQuantity', 'WRITE')) return;

         const ticketId = changes?.ticketFk || instance?.ticketFk;
         const itemId = changes?.itemFk || instance?.itemFk;

@@ -1,6 +1,5 @@
 const ParameterizedSQL = require('loopback-connector').ParameterizedSQL;
 const UserError = require('vn-loopback/util/user-error');
-const loggable = require('vn-loopback/util/log');

 module.exports = Self => {
     Self.remoteMethodCtx('cloneWithEntries', {
@@ -11,8 +10,9 @@ module.exports = Self => {
             type: 'number',
             required: true,
             description: 'The original travel id',
-            http: {source: 'path'}
-        }],
+            http: {source: 'path'},
+        },
+        ],
         returns: {
             type: 'object',
             description: 'The new cloned travel id',
|
@ -24,61 +24,75 @@ module.exports = Self => {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
Self.cloneWithEntries = async(ctx, id) => {
|
Self.cloneWithEntries = async(ctx, id, options) => {
|
||||||
const conn = Self.dataSource.connector;
|
const conn = Self.dataSource.connector;
|
||||||
const travel = await Self.findById(id, {
|
const myOptions = {};
|
||||||
fields: [
|
let tx = options?.transaction;
|
||||||
'id',
|
|
||||||
'shipped',
|
|
||||||
'landed',
|
|
||||||
'warehouseInFk',
|
|
||||||
'warehouseOutFk',
|
|
||||||
'agencyModeFk',
|
|
||||||
'ref'
|
|
||||||
]
|
|
||||||
});
|
|
||||||
const started = Date.vnNew();
|
|
||||||
const ended = Date.vnNew();
|
|
||||||
|
|
||||||
if (!travel)
|
try {
|
||||||
throw new UserError('Travel not found');
|
if (typeof options == 'object')
|
||||||
|
Object.assign(myOptions, options);
|
||||||
|
|
||||||
let stmts = [];
|
if (!myOptions.transaction) {
|
||||||
let stmt;
|
tx = await Self.beginTransaction({});
|
||||||
|
myOptions.transaction = tx;
|
||||||
|
}
|
||||||
|
|
||||||
stmt = new ParameterizedSQL(
|
const travel = await Self.findById(id, {
|
||||||
`CALL travel_cloneWithEntries(?, ?, ?, ?, ?, ?, ?, @vTravelFk)`, [
|
fields: [
|
||||||
id,
|
'id',
|
||||||
started,
|
'shipped',
|
||||||
ended,
|
'landed',
|
||||||
travel.warehouseOutFk,
|
'warehouseInFk',
|
||||||
travel.warehouseInFk,
|
'warehouseOutFk',
|
||||||
travel.ref,
|
'agencyModeFk',
|
||||||
travel.agencyModeFk
|
'ref'
|
||||||
]
|
]
|
||||||
);
|
});
|
||||||
stmts.push(stmt);
|
const started = Date.vnNew();
|
||||||
const newTravelIndex = stmts.push('SELECT @vTravelFk AS id') - 1;
|
const ended = Date.vnNew();
|
||||||
|
|
||||||
const sql = ParameterizedSQL.join(stmts, ';');
|
if (!travel)
|
||||||
const result = await conn.executeStmt(sql);
|
throw new UserError('Travel not found');
|
||||||
const [lastInsert] = result[newTravelIndex];
|
|
||||||
|
|
||||||
if (!lastInsert.id)
|
let stmts = [];
|
||||||
throw new UserError('Unable to clone this travel');
|
let stmt;
|
||||||
|
stmt = new ParameterizedSQL(
|
||||||
|
`CALL travel_cloneWithEntries(?, ?, ?, ?, ?, ?, ?, @vTravelFk)`, [
|
||||||
|
id,
|
||||||
|
started,
|
||||||
|
ended,
|
||||||
|
travel.warehouseOutFk,
|
||||||
|
travel.warehouseInFk,
|
||||||
|
travel.ref,
|
||||||
|
travel.agencyModeFk
|
||||||
|
]
|
||||||
|
);
|
||||||
|
stmts.push(stmt);
|
||||||
|
const newTravelIndex = stmts.push('SELECT @vTravelFk AS id') - 1;
|
||||||
|
|
||||||
const newTravel = await Self.findById(lastInsert.id, {
|
const sql = ParameterizedSQL.join(stmts, ';');
|
||||||
fields: [
|
const result = await conn.executeStmt(sql, myOptions);
|
||||||
'id',
|
const [lastInsert] = result[newTravelIndex];
|
||||||
'shipped',
|
|
||||||
'landed',
|
|
||||||
'warehouseInFk',
|
|
||||||
'warehouseOutFk',
|
|
||||||
'agencyModeFk',
|
|
||||||
'ref'
|
|
||||||
]
|
|
||||||
});
|
|
||||||
|
|
||||||
return newTravel.id;
|
if (!lastInsert.id)
|
||||||
|
throw new UserError('Unable to clone this travel');
|
||||||
|
|
||||||
|
const newTravel = await Self.findById(lastInsert.id, {
|
||||||
|
fields: [
|
||||||
|
'id',
|
||||||
|
'shipped',
|
||||||
|
'landed',
|
||||||
|
'warehouseInFk',
|
||||||
|
'warehouseOutFk',
|
||||||
|
'agencyModeFk',
|
||||||
|
'ref'
|
||||||
|
]
|
||||||
|
}, myOptions);
|
||||||
|
return newTravel.id;
|
||||||
|
} catch (e) {
|
||||||
|
if (tx) await tx.rollback();
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
|
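With the new `options` parameter, a caller can hand `cloneWithEntries` an existing transaction and decide itself whether the clone survives; the updated spec below exercises exactly this. A minimal sketch, assuming the usual `vn-loopback` server bootstrap and the fixture ids used by the spec:

```js
// Sketch only: preview a clone of travel 5 and throw it away afterwards.
const {models} = require('vn-loopback/server/server');

async function previewClone() {
    const ctx = {req: {accessToken: {userId: 1102}}};
    const tx = await models.Travel.beginTransaction({});
    try {
        const options = {transaction: tx};
        const newTravelId = await models.Travel.cloneWithEntries(ctx, 5, options);
        await tx.rollback(); // nothing is persisted
        return newTravelId;
    } catch (e) {
        if (tx) await tx.rollback();
        throw e;
    }
}
```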

@@ -5,73 +5,36 @@ describe('Travel cloneWithEntries()', () => {
     const travelId = 5;
     const currentUserId = 1102;
     const ctx = {req: {accessToken: {userId: currentUserId}}};
-    let travelBefore;
     let newTravelId;

-    // afterAll(async() => {
-    //     try {
-    //         const entries = await models.Entry.find({
-    //             where: {
-    //                 travelFk: newTravelId
-    //             }
-    //         });
-    //         const entriesId = entries.map(entry => entry.id);
-
-    //         // Destroy all entries buys
-    //         await models.Buy.destroyAll({
-    //             where: {
-    //                 entryFk: {inq: entriesId}
-    //             }
-    //         });
-
-    //         // Destroy travel entries
-    //         await models.Entry.destroyAll({
-    //             where: {
-    //                 travelFk: newTravelId
-    //             }
-    //         });
-
-    //         // Destroy new travel
-    //         await models.Travel.destroyById(newTravelId);
-
-    //         // Restore original travel shipped & landed
-    //         const travel = await models.Travel.findById(travelId);
-    //         await travel.updateAttributes({
-    //             shipped: travelBefore.shipped,
-    //             landed: travelBefore.landed
-    //         });
-    //     } catch (error) {
-    //         console.error(error);
-    //     }
-    // });
-
     it(`should clone the travel and the containing entries`, async() => {
-        pending('#2687 - Cannot make a data rollback because of the triggers');
+        const tx = await models.Travel.beginTransaction({
+        });
         const warehouseThree = 3;
         const agencyModeOne = 1;
-        const yesterday = Date.vnNew();
-        yesterday.setDate(yesterday.getDate() - 1);
-
-        travelBefore = await models.Travel.findById(travelId);
-        await travelBefore.updateAttributes({
-            shipped: yesterday,
-            landed: yesterday
-        });
-
-        newTravelId = await models.Travel.cloneWithEntries(ctx, travelId);
-        const travelEntries = await models.Entry.find({
-            where: {
-                travelFk: newTravelId
-            }
-        });
-
-        const newTravel = await models.Travel.findById(travelId);
-
-        expect(newTravelId).not.toEqual(travelId);
-        expect(newTravel.ref).toEqual('fifth travel');
-        expect(newTravel.warehouseInFk).toEqual(warehouseThree);
-        expect(newTravel.warehouseOutFk).toEqual(warehouseThree);
-        expect(newTravel.agencyModeFk).toEqual(agencyModeOne);
-        expect(travelEntries.length).toBeGreaterThan(0);
+        try {
+            const options = {transaction: tx};
+            newTravelId = await models.Travel.cloneWithEntries(ctx, travelId, options);
+            const travelEntries = await models.Entry.find({
+                where: {
+                    travelFk: newTravelId
+                }
+            }, options);
+            const newTravel = await models.Travel.findById(travelId);
+
+            expect(newTravelId).not.toEqual(travelId);
+            expect(newTravel.ref).toEqual('fifth travel');
+            expect(newTravel.warehouseInFk).toEqual(warehouseThree);
+            expect(newTravel.warehouseOutFk).toEqual(warehouseThree);
+            expect(newTravel.agencyModeFk).toEqual(agencyModeOne);
+            expect(travelEntries.length).toBeGreaterThan(0);
+
+            await tx.rollback();
+            const travelRemoved = await models.Travel.findById(newTravelId, options);
+
+            expect(travelRemoved).toBeNull();
+        } catch (e) {
+            if (tx) await tx.rollback();
+            throw e;
+        }
     });
 });