Merge branch 'dev' of https://git.verdnatura.es/salix into dev
commit 791c0e2d41
@@ -8,6 +8,8 @@ env.BRANCH_NAME = branchName;
 env.TAG = "${env.BUILD_NUMBER}";
 env.salixUser="${env.salixUser}";
 env.salixPassword="${env.salixPassword}";
+env.salixHost = "${env.productionSalixHost}";
+env.salixPort = "${env.productionSalixPort}";
 
 switch (branchName){
     case branchTest:
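This first hunk appears to come from the CI pipeline script (the env.* assignments and the switch (branchName) suggest the Jenkinsfile): salixHost and salixPort now default to the production values before the branch switch, presumably so that test branches can override them. A minimal sketch of how a Node service might read those variables once they reach the container environment; only the variable names come from the diff, the helper and the defaults below are assumptions:

// Hypothetical consumer of the pipeline variables (not part of the commit).
// Only the names salixHost, salixPort, salixUser and salixPassword come from
// the diff; the helper and its fallback values are illustrative.
function salixConnectionConfig() {
    return {
        host: process.env.salixHost || 'localhost',
        port: Number(process.env.salixPort) || 3306,
        user: process.env.salixUser,
        password: process.env.salixPassword
    };
}

module.exports = salixConnectionConfig;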
@@ -161,8 +161,12 @@ gulp.task('docker-compose', async () => {
         // dockerFile = 'Dockerfile';
 
         composeYml.services[service.name] = {
-            environment: ['NODE_ENV=${NODE_ENV}' ,'salixHost=${salixHost}', 'salixPort=${salixPort}',
-                'salixUser=${salixUser}', 'salixPassword=${salixPassword}'
+            environment: [
+                'NODE_ENV=${NODE_ENV}',
+                'salixHost=${salixHost}',
+                'salixPort=${salixPort}',
+                'salixUser=${salixUser}',
+                'salixPassword=${salixPassword}'
             ],
             container_name: `\${BRANCH_NAME}-${service.name}`,
             image: `${service.name}:\${TAG}`,
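The second hunk is the gulp docker-compose task: the environment array is only re-laid out, one variable per line, so the generated compose definition is unchanged. For reference, a sketch of the service entry the task would build for a hypothetical service named 'item' (the name is an assumption; the keys and ${...} placeholders are taken from the diff):

// Illustrative result for a hypothetical service named 'item'.
// gulp resolves `${service.name}` at build time, while the escaped
// \${BRANCH_NAME} and \${TAG} placeholders are left as literal text
// for docker-compose to substitute from the Jenkins environment.
let composeYml = {services: {}};

composeYml.services.item = {
    environment: [
        'NODE_ENV=${NODE_ENV}',
        'salixHost=${salixHost}',
        'salixPort=${salixPort}',
        'salixUser=${salixUser}',
        'salixPassword=${salixPassword}'
    ],
    container_name: '${BRANCH_NAME}-item',
    image: 'item:${TAG}'
};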
@@ -21,7 +21,7 @@ module.exports = Self => {
         }
     });
 
-    Self.clone = async (itemId, callback) => {
+    Self.clone = async itemId => {
        let filter = {
            where: {
                id: itemId
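In the item clone method the unused callback parameter is dropped, leaving a plain async function whose returned promise carries the result (clone is presumably registered as a remote method just above this hunk). A small usage sketch, not part of the commit:

// Illustrative caller (not part of the commit): with the callback parameter
// gone, the cloned item is obtained by awaiting the returned promise.
async function cloneExample(app, itemId) {
    let clonedItem = await app.models.Item.clone(itemId);
    return clonedItem;
}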
@@ -30,6 +30,7 @@ module.exports = Self => {
                {relation: "itemTag", scope: {order: "priority ASC", include: {relation: "tag"}}}
            ]
        };
+
        try {
            let origin = await Self.findOne(filter);
            let copy = JSON.parse(JSON.stringify(origin));
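The context around this hunk shows the copy strategy: the origin item is loaded together with its tags and then deep-copied with JSON.parse(JSON.stringify(origin)), so the copy can be edited and saved without mutating the loaded instance. A tiny standalone illustration of that idiom, on made-up data:

// Deep-copy idiom from the hunk above, shown on invented data: serialising
// and re-parsing detaches the copy from the original object graph.
let origin = {id: 1, name: 'Item', itemTag: [{tagFk: 4, value: '50'}]};
let copy = JSON.parse(JSON.stringify(origin));

copy.itemTag[0].value = '60';
console.log(origin.itemTag[0].value); // still '50': the original is untouched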
@@ -1,3 +1,5 @@
+let UserError = require('../../../loopback/common/helpers').UserError;
+
 module.exports = function(Self) {
     require('../methods/item/filter.js')(Self);
     require('../methods/item/clone.js')(Self);
@@ -5,4 +7,24 @@ module.exports = function(Self) {
 
     Self.validatesPresenceOf('name', {message: 'Cannot be blank'});
     Self.validatesPresenceOf('originFk', {message: 'Cannot be blank'});
+
+    Self.observe('before save', async function(ctx) {
+        await Self.availableId(ctx);
+    });
+
+    Self.availableId = async function(ctx) {
+        if (ctx.isNewInstance) {
+            try {
+                let query = `SELECT i1.id + 1 as id FROM vn.item i1
+                    LEFT JOIN vn.item i2 ON i1.id + 1 = i2.id
+                    WHERE i2.id IS NULL ORDER BY i1.id LIMIT 1`;
+
+                let newId = await Self.rawSql(query);
+
+                ctx.instance.id = newId[0].id;
+            } catch (e) {
+                throw new UserError(e);
+            }
+        }
+    };
 };
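The new before save observer assigns the lowest free id in vn.item to every new instance and wraps any failure in the freshly imported UserError. The self-join returns i1.id + 1 for the first row whose successor is missing, i.e. the first gap in the id sequence. A worked JavaScript sketch of the same gap-finding rule over an in-memory list of ids (purely illustrative, not part of the commit):

// Same gap-finding rule as the SQL, applied to a plain array of ids.
function firstAvailableId(ids) {
    let present = new Set(ids);
    let gapStarts = ids
        .filter(id => !present.has(id + 1)) // LEFT JOIN ... WHERE i2.id IS NULL
        .sort((a, b) => a - b);             // ORDER BY i1.id
    // LIMIT 1: the first gap start, plus one, is the available id.
    return gapStarts.length ? gapStarts[0] + 1 : null;
}

console.log(firstAvailableId([1, 2, 3, 5, 7])); // 4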
@@ -0,0 +1,11 @@
+const app = require('../../../../item/server/server');
+let ctx = {isNewInstance: true, instance: {}};
+describe('Item availableId()', () => {
+    it('should define ctx.instance.id with the expected id', async() => {
+        let Item = app.models.Item;
+
+        await Item.availableId(ctx);
+
+        expect(ctx.instance.id).toEqual(6);
+    });
+});
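The new Jasmine spec exercises availableId against the running app and expects id 6, which presumably reflects the seeded fixture data (items 1 to 5 present, 6 free); the asserted value is therefore fixture-dependent rather than a property of the algorithm itself. With the firstAvailableId sketch above, firstAvailableId([1, 2, 3, 4, 5]) likewise yields 6.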