Merge branch 'dev' into 7348-client-hasDailyInvoice
gitea/salix/pipeline/pr-dev: This commit looks good
commit f25d5c3bb5
@@ -314,5 +314,4 @@ INSERT INTO mysql.roles_mapping (`User`, `Host`, `Role`, `Admin_option`)
SELECT SUBSTR(`User`, @prefixLen + 1), `Host`, `Role`, `Admin_option`
FROM mysql.roles_mapping
WHERE `User` LIKE @prefixedLike AND `Host` = @genRoleHost;

FLUSH PRIVILEGES;

@@ -121,7 +121,7 @@ INSERT INTO hedera.orderConfig (`id`, `employeeFk`, `defaultAgencyFk`, `guestMet
INSERT INTO `account`.`user`(`id`,`name`,`nickname`, `password`,`role`,`active`,`email`,`lang`, `image`)
VALUES
(1101, 'brucewayne', 'Bruce Wayne', '$2b$10$UzQHth.9UUQ1T5aiQJ21lOU0oVlbxoqH4PFM9V8T90KNSAcg0eEL2', 2, 1, 'BruceWayne@mydomain.com', 'es','1101'),
-(1102, 'petterparker', 'Petter Parker', '$2b$10$UzQHth.9UUQ1T5aiQJ21lOU0oVlbxoqH4PFM9V8T90KNSAcg0eEL2', 2, 1, 'PetterParker@mydomain.com', 'en','1102'),
+(1102, 'petterparker', 'Petter Parker', '$2b$10$UzQHth.9UUQ1T5aiQJ21lOU0oVlbxoqH4PFM9V8T90KNSAcg0eEL2', 131, 1, 'PetterParker@mydomain.com', 'en','1102'),
(1103, 'clarkkent', 'Clark Kent', '$2b$10$UzQHth.9UUQ1T5aiQJ21lOU0oVlbxoqH4PFM9V8T90KNSAcg0eEL2', 2, 1, 'ClarkKent@mydomain.com', 'fr','1103'),
(1104, 'tonystark', 'Tony Stark', '$2b$10$UzQHth.9UUQ1T5aiQJ21lOU0oVlbxoqH4PFM9V8T90KNSAcg0eEL2', 2, 1, 'TonyStark@mydomain.com', 'es','1104'),
(1105, 'maxeisenhardt', 'Max Eisenhardt', '$2b$10$UzQHth.9UUQ1T5aiQJ21lOU0oVlbxoqH4PFM9V8T90KNSAcg0eEL2', 2, 1, 'MaxEisenhardt@mydomain.com', 'pt','1105'),
@@ -1500,7 +1500,8 @@ INSERT INTO `vn`.`travel`(`id`,`shipped`, `landed`, `warehouseInFk`, `warehouseO
(5, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 3, 3, 1, 50.00, 500, 'fifth travel', 1, 1, 5),
(6, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 4, 4, 1, 50.00, 500, 'sixth travel', 1, 2, 6),
(7, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 5, 4, 1, 50.00, 500, 'seventh travel', 2, 1, 7),
-(8, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 5, 1, 1, 50.00, 500, 'eight travel', 1, 2, 10);
+(8, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 5, 1, 1, 50.00, 500, 'eight travel', 1, 2, 10),
+(10, DATE_ADD(util.VN_CURDATE(), INTERVAL + 5 DAY), DATE_ADD(util.VN_CURDATE(), INTERVAL + 5 DAY), 5, 1, 1, 50.00, 500, 'nineth travel', 1, 2, 10);

INSERT INTO `vn`.`entry`(`id`, `supplierFk`, `created`, `travelFk`, `isConfirmed`, `companyFk`, `invoiceNumber`, `reference`, `isExcludedFromAvailable`, `isRaid`, `evaNotes`)
VALUES
@@ -1511,7 +1512,9 @@ INSERT INTO `vn`.`entry`(`id`, `supplierFk`, `created`, `travelFk`, `isConfirmed
(5, 2, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 5, 0, 442, 'IN2005', 'Movement 5', 0, 0, 'observation five'),
(6, 2, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 6, 0, 442, 'IN2006', 'Movement 6', 0, 0, 'observation six'),
(7, 2, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 7, 0, 442, 'IN2007', 'Movement 7', 0, 0, 'observation seven'),
-(8, 2, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 7, 0, 442, 'IN2008', 'Movement 8', 1, 1, '');
+(8, 2, DATE_ADD(util.VN_CURDATE(), INTERVAL -1 MONTH), 7, 0, 442, 'IN2008', 'Movement 8', 1, 1, ''),
+(9, 2, DATE_ADD(util.VN_CURDATE(), INTERVAL +2 DAY), 10, 0, 442, 'IN2009', 'Movement 9', 1, 1, ''),
+(10, 2, DATE_ADD(util.VN_CURDATE(), INTERVAL +2 DAY), 10, 0, 442, 'IN2009', 'Movement 9', 1, 1, '');

INSERT INTO `bs`.`waste`(`buyer`, `year`, `week`, `family`, `itemFk`, `itemTypeFk`, `saleTotal`, `saleWaste`, `rate`)
VALUES

@@ -1,4 +1,3 @@
-
const ParameterizedSQL = require('loopback-connector').ParameterizedSQL;
const buildFilter = require('vn-loopback/util/filter').buildFilter;
const mergeFilters = require('vn-loopback/util/filter').mergeFilters;
@@ -95,6 +94,11 @@ module.exports = Self => {
                arg: 'to',
                type: 'date',
                description: `The to date filter`
            },
+            {
+                arg: 'days',
+                type: 'number',
+                description: `N days interval`
+            }
        ],
        returns: {
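
The new `days` argument sits next to the existing `to` date filter in the remote method's accepted arguments; once Loopback parses a request it simply shows up on `ctx.args`, which is how the specs further down drive it. A minimal sketch of such a call (the `vn-loopback/server/server` import and the surrounding harness are assumptions modelled on how the existing specs are laid out, not part of this diff):

    // Sketch only: exercise Entry.filter() with the new `days` argument.
    // The import mirrors what the specs typically use; treat it as an assumption.
    const models = require('vn-loopback/server/server').models;

    async function entriesShippingSoon() {
        const ctx = {
            args: {days: 6},                    // only entries whose travel ships within 6 days
            req: {accessToken: {userId: 1102}}  // same user the specs authenticate as
        };
        return models.Entry.filter(ctx, {});    // the specs pass transaction options here
    }
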
@@ -192,6 +196,15 @@ module.exports = Self => {
                JOIN vn.currency cu ON cu.id = e.currencyFk`
        );

+        if (ctx.args.days) {
+            stmt.merge({
+                sql: `
+                    AND t.shipped <= util.VN_CURDATE() + INTERVAL ? DAY
+                    AND t.shipped >= util.VN_CURDATE()
+                `,
+                params: [ctx.args.days]
+            });
+        }
        stmt.merge(conn.makeSuffix(filter));
        const itemsIndex = stmts.push(stmt) - 1;

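
In the merged statement the `?` placeholder stays parameterized and `ctx.args.days` travels in `params`, so the added clause just narrows `t.shipped` to the window from today up to today plus N days. Spelled out as a plain JavaScript predicate (illustrative only; the PR keeps this logic in SQL):

    // Illustrative equivalent of the SQL window above, not code from this PR.
    function shippedWithinDays(shipped, days, today = new Date()) {
        const start = new Date(today);
        start.setHours(0, 0, 0, 0);            // util.VN_CURDATE() truncates to the day
        const end = new Date(start);
        end.setDate(end.getDate() + days);     // util.VN_CURDATE() + INTERVAL ? DAY
        return shipped >= start && shipped <= end;
    }

    // A travel shipping 5 days from now falls inside a 6-day window:
    console.log(shippedWithinDays(new Date(Date.now() + 5 * 864e5), 6)); // true
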
@@ -39,7 +39,7 @@ describe('Entry filter()', () => {

            const result = await models.Entry.filter(ctx, options);

-            expect(result.length).toEqual(9);
+            expect(result.length).toEqual(11);

            await tx.rollback();
        } catch (e) {
@@ -55,13 +55,13 @@ describe('Entry filter()', () => {

        try {
            const ctx = {
-                args: {},
+                args: {days: 6},
                req: {accessToken: {userId: 1102}}
            };

            const result = await models.Entry.filter(ctx, options);

-            expect(result.length).toEqual(6);
+            expect(result.length).toEqual(2);

            await tx.rollback();
        } catch (e) {
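
The new expectation of 2 follows from the fixture changes above: entries 9 and 10 are the only rows pointing at travel 10, and travel 10 ships five days from today, which lands inside the six-day window. A quick sanity check of that arithmetic (the constants are copied from the hunks above; the snippet itself is only an illustration):

    // Fixture recap: travel 10 ships at util.VN_CURDATE() + 5 DAY,
    // and entries 9 and 10 both reference travelFk 10.
    const windowDays = 6;               // ctx.args.days in the spec
    const travelShippedOffsetDays = 5;  // from the new travel row
    const entriesOnTravel10 = [9, 10];

    const inWindow = travelShippedOffsetDays >= 0 && travelShippedOffsetDays <= windowDays;
    console.log(inWindow ? entriesOnTravel10.length : 0); // 2, matching the expectation
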
@@ -84,7 +84,7 @@ describe('Entry filter()', () => {

            const result = await models.Entry.filter(ctx, options);

-            expect(result.length).toEqual(6);
+            expect(result.length).toEqual(8);

            await tx.rollback();
        } catch (e) {
@@ -107,7 +107,7 @@ describe('Entry filter()', () => {

            const result = await models.Entry.filter(ctx, options);

-            expect(result.length).toEqual(6);
+            expect(result.length).toEqual(8);

            await tx.rollback();
        } catch (e) {
@@ -131,7 +131,7 @@ describe('Entry filter()', () => {

            const result = await models.Entry.filter(ctx, options);

-            expect(result.length).toEqual(8);
+            expect(result.length).toEqual(10);

            await tx.rollback();
        } catch (e) {

@@ -79,7 +79,7 @@ describe('Travel extraCommunityFilter()', () => {

        const result = await app.models.Travel.extraCommunityFilter(ctx, filter);

-        expect(result.length).toEqual(8);
+        expect(result.length).toEqual(9);
    });

    it('should return the travel matching "cargoSupplierFk"', async() => {
@@ -110,6 +110,6 @@ describe('Travel extraCommunityFilter()', () => {

        const result = await app.models.Travel.extraCommunityFilter(ctx, filter);

-        expect(result.length).toEqual(1);
+        expect(result.length).toEqual(2);
    });
});

@@ -50,7 +50,7 @@ describe('Travel filter()', () => {

        const result = await app.models.Travel.filter(ctx);

-        expect(result.length).toEqual(5);
+        expect(result.length).toEqual(6);
    });

    it('should return the routes matching "shipped from" and "shipped to"', async() => {
@@ -80,6 +80,6 @@ describe('Travel filter()', () => {

        const result = await app.models.Travel.filter(ctx);

-        expect(result.length).toEqual(5);
+        expect(result.length).toEqual(6);
    });
});