require('require-yaml');
|
|
const gulp = require('gulp');
|
|
const exec = require('child_process').exec;
|
|
const PluginError = require('plugin-error');
|
|
const argv = require('minimist')(process.argv.slice(2));
|
|
const log = require('fancy-log');
|
|
const request = require('request');
|
|
const e2eConfig = require('./e2e/helpers/config.js');
|
|
|
|
// Configuration

const isWindows = /^win/.test(process.platform);

if (argv.NODE_ENV)
    process.env.NODE_ENV = argv.NODE_ENV;

const langs = ['es', 'en'];
const srcDir = './front';
const modulesDir = './modules';
const buildDir = 'dist';

// Reassigned by docker() with the id of a freshly started container, so it
// must remain `let`.
let containerId = 'salix-db';

const dataSources = require('./loopback/server/datasources.json');
// vn connection settings; docker() mutates host/port in --random mode.
const dbConf = dataSources.vn;

// Paths watched by the nodemon-based backend tasks.
const backSources = [
    '!node_modules',
    'loopback',
    'modules/*/back/**',
    'modules/*/back/*',
    'back',
    'print'
];
|
|
|
|
// Development

// Builds both the client locale bundles and the merged routes file.
const localesRoutes = gulp.parallel(locales, routes);
localesRoutes.description = `Builds locales and routes`;

// Cleans the build dir, then runs locales/routes generation, the file
// watcher and the webpack dev server in parallel.
const front = gulp.series(clean, gulp.parallel(localesRoutes, watch, webpackDevServer));
front.description = `Starts frontend service`;
|
|
|
|
/**
 * Boots the loopback application and signals gulp once it has started.
 *
 * @param {Function} done Gulp async-completion callback
 */
function backOnly(done) {
    const app = require(`./loopback/server/server`);
    app.start();
    app.on('started', done);
}
backOnly.description = `Starts backend service`;
|
|
|
|
/**
 * Runs the backend under nodemon, restarting whenever backend sources
 * change.
 *
 * @param {Function} done Gulp async-completion callback, handed to nodemon
 */
function backWatch(done) {
    const nodemon = require('gulp-nodemon');

    // XXX: Workaround to avoid nodemon bug
    // https://github.com/remy/nodemon/issues/1346
    const commands = ['node --inspect ./node_modules/gulp/bin/gulp.js'];
    if (!isWindows) commands.unshift('sleep 1');

    nodemon({
        exec: commands.join(' && '),
        ext: 'js html css json',
        args: ['backOnly'],
        watch: backSources,
        done: done
    });
}
// Fixed typo: 'waching' -> 'watching'.
backWatch.description = `Starts backend in watching mode`;
|
|
|
|
// Makes sure the database container is running before watching the backend.
const back = gulp.series(dockerStart, backWatch);
back.description = `Starts backend and database service`;

// Default task: full development environment (frontend + backend + db).
const defaultTask = gulp.parallel(front, back);
defaultTask.description = `Starts all application services`;
|
|
|
|
// Backend tests

/**
 * Boots the loopback app, runs the backend Jasmine specs once, then
 * disconnects the app.
 *
 * Accepts --junit to save reports to an XML file and --random to boot with
 * the (docker-patched) datasources.
 */
async function backTestOnce() {
    let bootOptions;

    // In random mode the datasources were patched by docker() to point at a
    // random-port container, so pass them explicitly.
    if (argv['random'])
        bootOptions = {dataSources};

    const app = require(`./loopback/server/server`);
    app.boot(bootOptions);

    await new Promise((resolve, reject) => {
        const jasmine = require('gulp-jasmine');

        const options = {errorOnFail: false};

        if (argv.junit) {
            const reporters = require('jasmine-reporters');
            options.reporter = new reporters.JUnitXmlReporter();
        }

        const backSpecFiles = [
            'back/**/*.spec.js',
            'loopback/**/*.spec.js',
            'modules/*/back/**/*.spec.js'
        ];

        gulp.src(backSpecFiles)
            .pipe(jasmine(options))
            // Fixed: stream errors used to be unhandled, leaving the
            // promise pending forever; reject instead.
            .on('error', reject)
            .on('end', resolve)
            .resume();
    });

    await app.disconnect();
}
backTestOnce.description = `Runs the backend tests once, can receive --junit arg to save reports on a xml file`;
|
|
|
|
/**
 * Starts a database container and runs the backend tests once against it.
 * In --random mode the throwaway container is removed afterwards, pass or
 * fail.
 */
async function backTestDockerOnce() {
    let containerId = await docker();
    // The old `catch (e) { throw e; }` was a no-op; try/finally suffices.
    try {
        await backTestOnce();
    } finally {
        if (argv['random'])
            await execP(`docker rm -fv ${containerId}`);
    }
}
backTestDockerOnce.description = `Runs backend tests using in site container once`;
|
|
|
|
/**
 * Starts a database container and runs the backend tests in watch mode
 * against it. In --random mode the throwaway container is removed when the
 * watcher ends, pass or fail.
 */
async function backTestDocker() {
    let containerId = await docker();
    // The old `catch (e) { throw e; }` was a no-op; try/finally suffices.
    try {
        await backTest();
    } finally {
        if (argv['random'])
            await execP(`docker rm -fv ${containerId}`);
    }
}
backTestDocker.description = `Runs backend tests restoring fixtures first`;
|
|
|
|
/**
 * Watches backend sources with nodemon and re-runs the backTestOnce task
 * on every change.
 *
 * @param {Function} done Gulp async-completion callback, handed to nodemon
 */
function backTest(done) {
    const nodemon = require('gulp-nodemon');

    const settings = {
        exec: ['node ./node_modules/gulp/bin/gulp.js'],
        args: ['backTestOnce'],
        watch: backSources,
        done: done
    };

    nodemon(settings);
}
backTest.description = `Watches for changes in modules to execute backTest task`;
|
|
|
|
// End to end tests

/**
 * Runs the e2e Jasmine specs.
 *
 * Spec paths 01*..09* are listed explicitly before the generic glob so the
 * suites run in a fixed order; the final `**` glob picks up any remaining
 * specs (presumably deduped by gulp.src -- verify if paths change).
 *
 * Accepts --show/-s to display the browser while the tests run.
 *
 * @return {Stream} The gulp stream
 */
function e2eOnly() {
    require('@babel/register')({presets: ['@babel/preset-env']});
    require('@babel/polyfill');

    const jasmine = require('gulp-jasmine');
    const SpecReporter = require('jasmine-spec-reporter').SpecReporter;
    const createNightmare = require('./e2e/helpers/nightmare');

    // Note: env vars are strings, so `true` is stored as 'true'.
    if (argv.show || argv.s)
        process.env.E2E_SHOW = true;
    process.env.ELECTRON_DISABLE_SECURITY_WARNINGS = true;

    const specFiles = [
        `${__dirname}/e2e/paths/01*/*[sS]pec.js`,
        `${__dirname}/e2e/paths/02*/*[sS]pec.js`,
        `${__dirname}/e2e/paths/03*/*[sS]pec.js`,
        `${__dirname}/e2e/paths/04*/*[sS]pec.js`,
        `${__dirname}/e2e/paths/05*/*[sS]pec.js`,
        `${__dirname}/e2e/paths/06*/*[sS]pec.js`,
        `${__dirname}/e2e/paths/07*/*[sS]pec.js`,
        `${__dirname}/e2e/paths/08*/*[sS]pec.js`,
        `${__dirname}/e2e/paths/09*/*[sS]pec.js`,
        `${__dirname}/e2e/paths/**/*[sS]pec.js`,
        `${__dirname}/e2e/helpers/extensions.js`
    ];

    return gulp.src(specFiles).pipe(jasmine({
        errorOnFail: false,
        timeout: 10000,
        reporter: [
            new SpecReporter({
                spec: {
                    displayStacktrace: 'summary',
                    displaySuccessful: true,
                    displayFailedSpec: true,
                    displaySpecDuration: true,
                }
            })
        ]
    })
        // When the suite finishes, shut down the shared nightmare browser
        // instance so the process can exit.
        .on('jasmineDone', function() {
            const nightmare = createNightmare();
            nightmare.end(() => {});
        })
    );
}
e2eOnly.description = `Runs the e2e tests only`;
|
|
|
|
/**
 * Polls the backend status endpoint every 250ms until it answers `true`.
 *
 * @return {Promise<Number>} Resolves with the number of attempts made
 */
async function backendStatus() {
    const milliseconds = 250;
    return new Promise((resolve, reject) => {
        let timer;
        let attempts = 1;
        timer = setInterval(() => {
            const url = `${e2eConfig.url}/api/Applications/status`;
            request.get(url, (err, res) => {
                if (err || attempts > 100) { // 250ms * 100 => 25s timeout
                    // Fixed: `throw` inside this callback could never reject
                    // the promise (it was an uncaught exception) and the
                    // interval kept firing; stop polling and reject instead.
                    clearInterval(timer);
                    reject(new Error('Could not connect to backend'));
                } else if (res && res.body == 'true') {
                    clearInterval(timer);
                    resolve(attempts);
                } else
                    attempts++;
            });
        }, milliseconds);
    });
}
backendStatus.description = `Performs a simple requests to check the backend status`;
|
|
|
|
// Fixed: the bare assignment created an implicit global; declare with const.
const e2e = gulp.series(docker, async function isBackendReady() {
    const attempts = await backendStatus();
    log(`Backend ready after ${attempts} attempt(s)`);

    return attempts;
}, e2eOnly);
e2e.description = `Restarts database and runs the e2e tests`;
|
|
|
|
/**
 * Runs the smoke tests.
 *
 * @return {Stream} The gulp stream
 */
function smokesOnly() {
    const jasmine = require('gulp-jasmine');
    const smokeSpecs = gulp.src('./e2e/smokes-tests.js');
    return smokeSpecs.pipe(jasmine({reporter: 'none'}));
}
smokesOnly.description = `Runs the smokes tests only`;
|
|
|
|
// Fixed: the bare assignment created an implicit global; declare with const.
const smokes = gulp.series(docker, smokesOnly);
smokes.description = `Restarts database and runs the smokes tests`;
|
|
|
|
/**
 * Installs the npm dependencies of every sub-package (front and print).
 *
 * Accepts --ci to pass CI-friendly flags to npm.
 *
 * @return {Stream} The gulp stream
 */
function install() {
    // Renamed require so it no longer shadows this function's name.
    const gulpInstall = require('gulp-install');
    const print = require('gulp-print');

    const npmArgs = argv.ci ? ['--no-audit', '--prefer-offline'] : [];

    const packageFiles = ['front/package.json', 'print/package.json'];
    return gulp.src(packageFiles)
        .pipe(print(filepath => `Installing packages in ${filepath}`))
        .pipe(gulpInstall({npm: npmArgs}));
}
install.description = `Installs node dependencies in all directories`;
|
|
|
|
// Shorthand so `gulp i` works like `gulp install`.
const i = gulp.series(install);
i.description = `Alias for the 'install' task`;
|
|
|
|
// Deployment

// Production build: clean the output dir, then generate locales/routes and
// the webpack bundles in parallel.
const build = gulp.series(clean, gulp.parallel(localesRoutes, webpack));
build.description = `Generates binaries and configuration files`;
|
|
|
|
/**
 * Removes everything inside the build directory.
 *
 * @return {Promise} Resolves when the deletion finishes
 */
function clean() {
    const del = require('del');
    return del([`${buildDir}/*`], {force: true});
}
clean.description = `Cleans all files generated by the 'build' task`;
|
|
|
|
// Webpack

/**
 * Compiles the application bundles to disk with webpack.
 *
 * @param {Function} done Gulp async-completion callback
 */
function webpack(done) {
    const webpackCompile = require('webpack');
    const merge = require('webpack-merge');

    const wpConfig = merge(require('./webpack.config.js'), {});
    const compiler = webpackCompile(wpConfig);

    compiler.run((err, stats) => {
        if (err) throw new PluginError('webpack', err);
        log('[webpack]', stats.toString(wpConfig.stats));
        done();
    });
}
webpack.description = `Transpiles application into files`;
|
|
|
|
/**
 * Compiles the application into memory and serves it with
 * webpack-dev-server, injecting the dev-server client and hot-reload
 * runtime into every webpack entry.
 *
 * @param {Function} done Gulp async-completion callback
 */
function webpackDevServer(done) {
    const webpack = require('webpack');
    const merge = require('webpack-merge');
    const WebpackDevServer = require('webpack-dev-server');

    let wpConfig = require('./webpack.config.js');
    wpConfig = merge(wpConfig, {});

    const devServer = wpConfig.devServer;

    // Prepend the hot-reload assets to each entry point.
    for (const entryName in wpConfig.entry) {
        const entry = wpConfig.entry[entryName];
        const entryAssets = Array.isArray(entry) ? entry : [entry];

        const wdsAssets = [
            `webpack-dev-server/client?http://localhost:${devServer.port}/`,
            `webpack/hot/dev-server`
        ];
        wpConfig.entry[entryName] = wdsAssets.concat(entryAssets);
    }

    const compiler = webpack(wpConfig);
    new WebpackDevServer(compiler, wpConfig.devServer)
        .listen(devServer.port, devServer.host, err => {
            if (err) throw new PluginError('webpack-dev-server', err);
            // XXX: Keep the server alive or continue?
            done();
        });
}
webpackDevServer.description = `Transpiles application into memory`;
|
|
|
|
// Locale

// Globs matching every per-language yaml translation file, both in the main
// front directory and inside each module.
let localeFiles = [
    `${srcDir}/**/locale/*.yml`,
    `${modulesDir}/*/front/**/locale/*.yml`
];
|
|
|
|
/**
 * Mixes all locale files into one JSON file per module and language. It looks
 * recursively in all project directories for locale folders with per language
 * yaml translation files.
 *
 * @return {Stream} The merged gulp streams
 */
function locales() {
    const mergeJson = require('gulp-merge-json');
    const gulpFile = require('gulp-file');
    const yaml = require('gulp-yaml');
    const merge = require('merge-stream');
    const fs = require('fs-extra');

    const streams = [];
    // Map of module name -> source path. Fixed: was declared as an array
    // but only ever used as a string-keyed map.
    const localePaths = {};

    const modules = fs.readdirSync(modulesDir);
    for (const mod of modules)
        localePaths[mod] = `${modulesDir}/${mod}`;

    const baseMods = ['core', 'salix'];
    for (const mod of baseMods)
        localePaths[mod] = `${srcDir}/${mod}`;

    // Merge every yaml translation file into one JSON per module/language.
    for (const mod in localePaths) {
        const path = localePaths[mod];
        for (const lang of langs) {
            const localeFiles = `${path}/**/locale/${lang}.yml`;
            streams.push(gulp.src(localeFiles)
                .pipe(yaml())
                .pipe(mergeJson({fileName: `${lang}.json`}))
                .pipe(gulp.dest(`${buildDir}/locale/${mod}`)));
        }
    }

    // Create an empty locale file for any module/language pair that has no
    // translations yet, so requests for it don't fail.
    for (const mod in localePaths) {
        for (const lang of langs) {
            const file = `${buildDir}/locale/${mod}/${lang}.json`;
            if (fs.existsSync(file)) continue;
            // Fixed: the placeholder name was hard-coded to 'en.json', so a
            // missing non-English locale was never actually generated.
            streams.push(gulpFile(`${lang}.json`, '{}', {src: true})
                .pipe(gulp.dest(`${buildDir}/locale/${mod}`)));
        }
    }

    return merge(streams);
}
locales.description = `Generates client locale files`;
|
|
|
|
// Routes

// Every module contributes a front/routes.json file.
let routeFiles = `${modulesDir}/*/front/routes.json`;

/**
 * Merges every module routes file into a single routes.js file in the
 * build directory.
 *
 * @return {Stream} The gulp stream
 */
function routes() {
    const concat = require('gulp-concat');
    const wrap = require('gulp-wrap');

    const merged = gulp.src(routeFiles)
        .pipe(concat('routes.js', {newLine: ','}))
        .pipe(wrap('var routes = [<%=contents%>\n];'));
    return merged.pipe(gulp.dest(buildDir));
}
routes.description = 'Merges all module routes file into one file';
|
|
|
|
// Watch

/**
 * Registers watchers that rebuild routes and locales whenever their source
 * files change.
 *
 * @param {Function} done Gulp async-completion callback
 */
function watch(done) {
    const watchers = [
        [routeFiles, routes],
        [localeFiles, locales]
    ];
    for (const [files, task] of watchers)
        gulp.watch(files, gulp.series(task));
    done();
}
watch.description = `Watches for changes in routes and locale files`;
|
|
|
|
// Docker

/**
 * Builds the database image and runs a container. It only rebuilds the
 * image when fixtures have been modified or when the day on which the
 * image was built is different to today. Some workarounds have been used
 * to avoid a bug with OverlayFS driver on MacOS.
 *
 * @return {Promise<String>} The running container id
 */
async function docker() {
    let d = new Date();
    let pad = v => v < 10 ? '0' + v : v;
    // Daily stamp passed as a build arg so the image cache expires each day.
    let stamp = `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())}`;
    await execP(`docker build --build-arg STAMP=${stamp} -t salix-db ./db`);

    let dockerArgs = `--name ${containerId} -p 3306:${dbConf.port}`;

    if (argv['random'])
        dockerArgs = '-p 3306';
    else {
        try {
            // Best effort: remove a stale container with the same name.
            await execP(`docker rm -fv ${containerId}`);
        } catch (e) {}
    }

    // Non-linux platforms need the chown workaround (OverlayFS bug).
    let runChown = process.platform != 'linux';
    if (argv['run-chown']) runChown = true;

    let result = await execP(`docker run --env RUN_CHOWN=${runChown} -d ${dockerArgs} salix-db`);
    // Fixed: exec() output ends with a newline; trim it so the id can be
    // safely interpolated into subsequent docker commands.
    containerId = result.stdout.trim();

    try {
        if (argv['random']) {
            let inspect = await execP(`docker inspect -f "{{json .NetworkSettings}}" ${containerId}`);
            let netSettings = JSON.parse(inspect.stdout);

            // Point the db config at the randomly assigned host port.
            dbConf.host = netSettings.Gateway;
            dbConf.port = netSettings.Ports['3306/tcp'][0]['HostPort'];
        }

        if (runChown) await dockerWait();
    } catch (err) {
        if (argv['random'])
            await execP(`docker rm -fv ${containerId}`);
        throw err;
    }

    return containerId;
}
docker.description = `Builds the database image and runs a container`;
|
|
|
|
/**
 * Starts the database container with the minimum effort: if it doesn't
 * exist the 'docker' task is run, and if it is already running nothing is
 * done. Keep in mind that when you do not rebuild the image you may be
 * using an outdated version of it. See the 'docker' task for more info.
 */
async function dockerStart() {
    let state;
    try {
        const inspect = await execP(`docker inspect -f "{{json .State}}" ${containerId}`);
        state = JSON.parse(inspect.stdout);
    } catch (err) {
        // Container doesn't exist yet: build and run it from scratch.
        return await docker();
    }

    if (state.Status === 'running')
        return;
    if (state.Status === 'exited') {
        await execP(`docker start ${containerId}`);
        await dockerWait();
        return;
    }
    throw new Error(`Unknown docker status: ${state.Status}`);
}
dockerStart.description = `Starts the database container`;
|
|
|
|
/**
 * Waits until the database container accepts MySQL connections.
 *
 * Checks the container state and attempts a connection every 100ms, giving
 * up after 4 minutes.
 *
 * @return {Promise} Resolves when MySQL is ready; rejects on timeout or if
 * the container exits
 */
function dockerWait() {
    return new Promise((resolve, reject) => {
        const mysql = require('mysql2');

        let interval = 100;
        let elapsedTime = 0;
        let maxInterval = 4 * 60 * 1000;

        let myConf = {
            user: dbConf.username,
            password: dbConf.password,
            host: dbConf.host,
            port: dbConf.port
        };

        log('Waiting for MySQL init process...');
        checker();

        async function checker() {
            elapsedTime += interval;
            let state;

            try {
                let result = await execP(`docker container inspect -f "{{json .State}}" ${containerId}`);
                state = JSON.parse(result.stdout);
            } catch (err) {
                return reject(new Error(err.message));
            }

            if (state.Status === 'exited')
                return reject(new Error('Docker exited, please see the docker logs for more info'));

            let conn = mysql.createConnection(myConf);
            // Swallow async connection errors; connect()'s callback decides.
            conn.on('error', () => {});
            conn.connect(err => {
                conn.destroy();
                if (!err) {
                    log('MySQL process ready.');
                    return resolve();
                }

                if (elapsedTime >= maxInterval)
                    // Fixed typo in the error message: 'whithin' -> 'within'.
                    reject(new Error(`MySQL not initialized within ${elapsedTime / 1000} secs`));
                else
                    setTimeout(checker, interval);
            });
        }
    });
}
dockerWait.description = `Waits until database service is ready`;
|
|
|
|
// Helpers

/**
 * Promisified version of exec().
 *
 * @param {String} command The exec command
 * @return {Promise} The promise, resolved with `{stdout, stderr}`
 */
function execP(command) {
    return new Promise((resolve, reject) => {
        exec(command, (err, stdout, stderr) => {
            if (err) return reject(err);
            resolve({stdout, stderr});
        });
    });
}
|
|
|
|
// Exported gulp tasks; `default` starts the full development environment.
module.exports = {
    default: defaultTask,
    front,
    back,
    backOnly,
    backWatch,
    backTestOnce,
    backTestDockerOnce,
    backTest,
    backTestDocker,
    e2e,
    e2eOnly,
    smokes,
    smokesOnly,
    i,
    install,
    build,
    clean,
    webpack,
    webpackDevServer,
    routes,
    locales,
    localesRoutes,
    watch,
    docker,
    dockerStart,
    dockerWait,
    backendStatus
};
|