diff --git a/Jenkinsfile b/Jenkinsfile
index c8a0579d7a..2b9e169b28 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,36 +1,57 @@
 #!/usr/bin/env groovy
+
+def PROTECTED_BRANCH
+def FROM_GIT
+def RUN_TESTS
+
 pipeline {
     agent any
     options {
         disableConcurrentBuilds()
     }
+    tools {
+        nodejs 'node-v20'
+    }
     environment {
         PROJECT_NAME = 'salix'
-        STACK_NAME = "${env.PROJECT_NAME}-${env.BRANCH_NAME}"
+        STACK_NAME = "${env.PROJECT_NAME}-${env.BRANCH_NAME}"
     }
     stages {
         stage('Checkout') {
             steps {
                 script {
                     switch (env.BRANCH_NAME) {
-                        case 'master':
-                            env.NODE_ENV = 'production'
-                            env.BACK_REPLICAS = 4
+                        case 'dev':
+                            env.NODE_ENV = 'dev'
+                            env.BACK_REPLICAS = 1
                             break
                         case 'test':
                             env.NODE_ENV = 'test'
                             env.BACK_REPLICAS = 2
                             break
+                        case 'master':
+                            env.NODE_ENV = 'production'
+                            env.BACK_REPLICAS = 4
+                            break
                     }
-                }
-                configFileProvider([
-                    configFile(fileId: "salix.groovy",
-                        variable: 'GROOVY_FILE')
-                ]) {
-                    load env.GROOVY_FILE
-                }
+                    def packageJson = readJSON file: 'package.json'
+                    env.VERSION = packageJson.version
+                    env.GIT_COMMIT_MSG = sh(
+                        script: 'git log -1 --pretty=%B ${GIT_COMMIT}',
+                        returnStdout: true
+                    ).trim()
+
+                    PROTECTED_BRANCH = [
+                        'dev',
+                        'test',
+                        'master'
+                    ].contains(env.BRANCH_NAME)
+
+                    FROM_GIT = JOB_NAME.startsWith('gitea/')
+                    RUN_TESTS = !PROTECTED_BRANCH && FROM_GIT
+                }
                 setEnv()
             }
         }
@@ -38,82 +59,91 @@ pipeline {
             environment {
                 NODE_ENV = ""
             }
-            steps {
-                nodejs('node-v20') {
-                    sh 'npm install --no-audit --prefer-offline'
-                    sh 'gulp install --ci'
+            parallel {
+                stage('Backend') {
+                    steps {
+                        sh 'npm install --no-audit --prefer-offline'
+                    }
+                }
+                stage('Frontend') {
+                    when {
+                        expression { return FROM_GIT }
+                    }
+                    steps {
+                        sh 'npm install --no-audit --prefer-offline --prefix=front'
+                    }
+                }
+                stage('Print') {
+                    when {
+                        expression { return FROM_GIT }
+                    }
+                    steps {
+                        sh 'npm install --no-audit --prefer-offline --prefix=print'
+                    }
                 }
             }
         }
         stage('Test') {
-            when { not { anyOf {
-                branch 'test'
-                branch 'master'
-            }}}
+            when {
+                expression { return RUN_TESTS }
+            }
             environment {
                 NODE_ENV = ""
                 TZ = 'Europe/Madrid'
             }
             parallel {
-                stage('Frontend') {
-                    steps {
-                        nodejs('node-v20') {
-                            sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=4'
-                        }
-                    }
-                }
                 stage('Backend') {
                     steps {
-                        nodejs('node-v20') {
-                            sh 'npm run test:back:ci'
-                        }
+                        sh 'npm run test:back:ci'
+                    }
+                }
+                stage('Frontend') {
+                    steps {
+                        sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=6'
                     }
                 }
             }
         }
         stage('Build') {
-            when { anyOf {
-                branch 'test'
-                branch 'master'
-            }}
+            when {
+                expression { return PROTECTED_BRANCH && FROM_GIT }
+            }
             environment {
                 CREDENTIALS = credentials('docker-registry')
             }
             steps {
-                nodejs('node-v20') {
-                    sh 'gulp build'
-                }
-
+                sh 'gulp build'
                 dockerBuild()
             }
         }
         stage('Deploy') {
-            when { anyOf {
-                branch 'test'
-                branch 'master'
-            }}
-            environment {
-                DOCKER_HOST = "${env.SWARM_HOST}"
+            when {
+                expression { return PROTECTED_BRANCH }
             }
-            steps {
-                sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
-            }
-        }
-        stage('Database') {
-            when { anyOf {
-                branch 'test'
-                branch 'master'
-            }}
-            steps {
-                configFileProvider([
-                    configFile(fileId: "config.${NODE_ENV}.ini",
-                        variable: 'MYSQL_CONFIG')
-                ]) {
-                    sh 'mkdir -p db/remotes'
-                    sh 'cp "$MYSQL_CONFIG" db/remotes/$NODE_ENV.ini'
+            parallel {
+                stage('Database') {
+                    steps {
+                        configFileProvider([
+                            configFile(fileId: "config.${env.NODE_ENV}.ini",
+                                variable: 'MYSQL_CONFIG')
+                        ]) {
+                            sh 'mkdir -p db/remotes'
+                            sh 'cp "$MYSQL_CONFIG" db/remotes/$NODE_ENV.ini'
+                        }
+
+                        sh 'npx myt push $NODE_ENV --force --commit'
+                    }
                 }
-                nodejs('node-v20') {
-                    sh 'npx myt push $NODE_ENV --force --commit'
+                stage('Docker') {
+                    when {
+                        expression { return FROM_GIT }
+                    }
+                    environment {
+                        DOCKER_HOST = "${env.SWARM_HOST}"
+                    }
+                    steps {
+                        sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
+                    }
                 }
             }
         }
@@ -121,7 +151,7 @@ pipeline {
     post {
         always {
             script {
-                if (!['master', 'test'].contains(env.BRANCH_NAME)) {
+                if (RUN_TESTS) {
                     try {
                         junit 'junitresults.xml'
                         junit 'junit.xml'
@@ -129,18 +159,28 @@
                         echo e.toString()
                     }
                 }
+            }
+        }
+        success {
+            script {
+                if (env.BRANCH_NAME == 'master' && FROM_GIT) {
+                    String message = env.GIT_COMMIT_MSG
+                    int index = message.indexOf('\n')
+                    if (index != -1)
+                        message = message.substring(0, index)
 
-                if (!env.COMMITTER_EMAIL || currentBuild.currentResult == 'SUCCESS') return;
-                try {
-                    mail(
-                        to: env.COMMITTER_EMAIL,
-                        subject: "Pipeline: ${env.JOB_NAME} (${env.BUILD_NUMBER}): ${currentBuild.currentResult}",
-                        body: "Check status at ${env.BUILD_URL}"
+                    rocketSend(
+                        channel: 'vn-database',
+                        message: "*DB version uploaded:* ${message}"
+                            +"\n$COMMITTER_EMAIL ($BRANCH_NAME)"
+                            +"\n$GIT_URL/commit/$GIT_COMMIT",
+                        rawMessage: true
                     )
-                } catch (e) {
-                    echo e.toString()
                 }
             }
         }
+        unsuccessful {
+            sendEmail()
+        }
     }
 }
diff --git a/back/tests.js b/back/tests.js
index a9cdeb338b..c4c4770909 100644
--- a/back/tests.js
+++ b/back/tests.js
@@ -1,9 +1,10 @@
+/* eslint-disable no-console */
 const path = require('path');
 const Myt = require('@verdnatura/myt/myt');
 const Run = require('@verdnatura/myt/myt-run');
 let dataSources = require('../loopback/server/datasources.json');
 
-let myt;
+let server;
 
 process.on('warning', warning => {
     console.log(warning.name);
@@ -11,26 +12,33 @@ process.on('warning', warning => {
     console.log(warning.stack);
 });
 
-process.on('SIGUSR2', async() => {
-    if (myt) await myt.deinit();
-});
+process.on('SIGUSR2', rmServer);
+process.on('exit', rmServer);
 
-process.on('exit', async function() {
-    if (myt) await myt.deinit();
-});
+async function rmServer() {
+    if (!server) return;
+    await server.rm();
+    server = null;
+}
 
 async function test() {
+    console.log('Building and running DB container.');
+
     const isCI = process.argv[2] === 'ci';
 
-    myt = new Myt();
+    const myt = new Myt();
     await myt.init({
         workspace: path.join(__dirname, '..'),
         random: true,
         ci: isCI,
-        tmpfs: true,
+        tmpfs: process.platform == 'linux',
         network: isCI ? 'jenkins' : null
     });
-    const {dbConfig} = await myt.run(Run);
+    server = await myt.run(Run);
+    await myt.deinit();
+    const {dbConfig} = server;
+
+    console.log('Initializing backend.');
 
     dataSources = JSON.parse(JSON.stringify(dataSources));
     Object.assign(dataSources.vn, {
@@ -47,6 +55,8 @@ async function test() {
     // FIXME: Workaround to wait for loopback to be ready
     await app.models.Application.status();
 
+    console.log('Running tests.');
+
     const Jasmine = require('jasmine');
     const jasmine = new Jasmine();
 
@@ -82,9 +92,13 @@ async function test() {
     });
 
     await jasmine.execute();
+
+    console.log('Stopping.');
+
     if (app) await app.disconnect();
-    if (myt) await myt.deinit();
-    console.log('App disconnected & container removed');
+    await rmServer();
+
+    console.log('Tests ended.\n');
 }
 
 test();
diff --git a/db/dump/fixtures.before.sql b/db/dump/fixtures.before.sql
index 59d35d4e5e..2045da4bf4 100644
--- a/db/dump/fixtures.before.sql
+++ b/db/dump/fixtures.before.sql
@@ -6,9 +6,10 @@
  */
 SET foreign_key_checks = 0;
 
--- CREATE ROLE 'salix';
--- GRANT 'salix' TO 'root'@'%';
--- SET DEFAULT ROLE 'salix' FOR 'root'@'%';
+DROP ROLE 'salix';
+CREATE ROLE 'salix';
+GRANT 'salix' TO 'root'@'%';
+SET DEFAULT ROLE 'salix' FOR 'root'@'%';
 
 CREATE SCHEMA IF NOT EXISTS `vn2008`;
 CREATE SCHEMA IF NOT EXISTS `tmp`;
diff --git a/docker-compose.yml b/docker-compose.yml
index 0d2db1a634..222e753d7c 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,7 +1,7 @@
 version: '3.7'
 services:
     front:
-        image: registry.verdnatura.es/salix-front:${BRANCH_NAME:?}
+        image: registry.verdnatura.es/salix-front:${VERSION:?}
         build:
             context: .
             dockerfile: front/Dockerfile
@@ -16,7 +16,7 @@ services:
                 limits:
                     memory: 1G
     back:
-        image: registry.verdnatura.es/salix-back:${BRANCH_NAME:?}
+        image: registry.verdnatura.es/salix-back:${VERSION:?}
         build: .
         ports:
             - 3000
diff --git a/e2e/paths/05-ticket/02_expeditions_and_log.spec.js b/e2e/paths/05-ticket/02_expeditions_and_log.spec.js
index b97576940b..4e80050438 100644
--- a/e2e/paths/05-ticket/02_expeditions_and_log.spec.js
+++ b/e2e/paths/05-ticket/02_expeditions_and_log.spec.js
@@ -27,6 +27,6 @@ describe('Ticket expeditions and log path', () => {
         const result = await page
             .countElement(selectors.ticketExpedition.expeditionRow);
 
-        expect(result).toEqual(4);
+        expect(result).toEqual(6);
     });
 });
diff --git a/e2e/paths/13-supplier/02_basic_data.spec.js b/e2e/paths/13-supplier/02_basic_data.spec.js
index 72ea6d8909..79a9898caa 100644
--- a/e2e/paths/13-supplier/02_basic_data.spec.js
+++ b/e2e/paths/13-supplier/02_basic_data.spec.js
@@ -41,10 +41,10 @@ describe('Supplier basic data path', () => {
         expect(result).toEqual('Plants Nick SL');
     });
 
-    it('should check the isSerious checkbox is now unchecked', async() => {
+    it('should check the isSerious checkbox is now checked', async() => {
         const result = await page.checkboxState(selectors.supplierBasicData.isSerious);
 
-        expect(result).toBe('unchecked');
+        expect(result).toBe('checked');
    });
 
     it('should check the isActive checkbox is now unchecked', async() => {
diff --git a/package.json b/package.json
index 3f3a3ad8e7..041bd39df3 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "salix-back",
-  "version": "24.06.01",
+  "version": "24.6.0",
   "author": "Verdnatura Levante SL",
   "description": "Salix backend",
   "license": "GPL-3.0",