#4036 Fixes, refactoring, generated fixtures, code cleanup

This commit is contained in:
Juan Ferrer 2022-12-21 13:34:17 +01:00
parent 578190458f
commit 313655ea12
31 changed files with 561 additions and 617 deletions

16
lib/command.js Normal file
View File

@ -0,0 +1,16 @@
/**
* Base class for MyVC commands.
*/
module.exports = class MyVCCommand {
get usage() {
return {};
}
get localOpts() {
return {};
}
async run(myvc, opts) {
throw new Error('run command not defined');
}
}

View File

@ -1,6 +1,6 @@
const spawn = require('child_process').spawn; const spawn = require('child_process').spawn;
const execFile = require('child_process').execFile; const execFile = require('child_process').execFile;
const camelToSnake = require('./lib').camelToSnake; const camelToSnake = require('./util').camelToSnake;
const docker = { const docker = {
async run(image, commandArgs, options, execOptions) { async run(image, commandArgs, options, execOptions) {
@ -9,7 +9,7 @@ const docker = {
: image; : image;
const execMode = options.detach ? 'exec' : 'spawn'; const execMode = options.detach ? 'exec' : 'spawn';
const child = await this.exec('run', const child = await this.command('run',
args, args,
options, options,
execMode, execMode,
@ -21,7 +21,7 @@ const docker = {
}, },
async build(url, options, execOptions) { async build(url, options, execOptions) {
return await this.exec('build', return await this.command('build',
url, url,
options, options,
'spawn', 'spawn',
@ -50,7 +50,7 @@ const docker = {
return await ct.inspect(options); return await ct.inspect(options);
}, },
async exec(command, args, options, execMode, execOptions) { async command(command, args, options, execMode, execOptions) {
const execArgs = [command]; const execArgs = [command];
if (options) if (options)
@ -106,21 +106,27 @@ class Container {
} }
async start(options) { async start(options) {
await docker.exec('start', this.id, options); await docker.command('start', this.id, options);
} }
async stop(options) { async stop(options) {
await docker.exec('stop', this.id, options); await docker.command('stop', this.id, options);
} }
async rm(options) { async rm(options) {
await docker.exec('rm', this.id, options); await docker.command('rm', this.id, options);
} }
async inspect(options) { async inspect(options) {
const child = await docker.exec('inspect', this.id, options); const child = await docker.command('inspect', this.id, options);
return JSON.parse(child.stdout); return JSON.parse(child.stdout);
} }
async exec(options, command, commandArgs, execMode, execOptions) {
let args = [this.id, command];
if (commandArgs) args = args.concat(commandArgs);
await docker.command('exec', args, options, execMode, execOptions);
}
} }
module.exports = docker; module.exports = docker;

112
lib/exporter-engine.js Normal file
View File

@ -0,0 +1,112 @@
const shajs = require('sha.js');
const fs = require('fs-extra');
const Exporter = require('./exporter');
module.exports = class ExporterEngine {
constructor(conn, myvcDir) {
this.conn = conn;
this.pullFile = `${myvcDir}/.pullinfo.json`;
this.exporters = [];
this.exporterMap = {};
}
async init () {
if (await fs.pathExists(this.pullFile)) {
this.pullInfo = JSON.parse(await fs.readFile(this.pullFile, 'utf8'));
const lastPull = this.pullInfo.lastPull;
if (lastPull)
this.pullInfo.lastPull = new Date(lastPull);
} else
this.pullInfo = {
lastPull: null,
shaSums: {}
};
this.shaSums = this.pullInfo.shaSums;
this.lastPull = this.pullInfo.lastPull;
this.infoChanged = false;
const types = [
'function',
'procedure',
'view',
'trigger',
'event'
];
for (const type of types) {
const exporter = new Exporter(this, type, this.conn);
await exporter.init();
this.exporters.push(exporter);
this.exporterMap[type] = exporter;
}
}
async fetchRoutine(type, schema, name) {
const exporter = this.exporterMap[type];
const [row] = await exporter.query(schema, name);
return row && exporter.format(row);
}
async fetchShaSum(type, schema, name) {
const sql = await this.fetchRoutine(type, schema, name);
this.setShaSum(type, schema, name, this.shaSum(sql));
}
shaSum(sql) {
if (!sql) return null;
return shajs('sha256')
.update(JSON.stringify(sql))
.digest('hex');
}
getShaSum(type, schema, name) {
try {
return this.shaSums[schema][type][name];
} catch (e) {};
return null;
}
setShaSum(type, schema, name, shaSum) {
if (!shaSum) {
this.deleteShaSum(type, schema, name);
return;
}
const shaSums = this.shaSums;
if (!shaSums[schema])
shaSums[schema] = {};
if (!shaSums[schema][type])
shaSums[schema][type] = {};
shaSums[schema][type][name] = shaSum;
this.infoChanged = true;
}
deleteShaSum(type, schema, name) {
try {
delete this.shaSums[schema][type][name];
this.infoChanged = true;
} catch (e) {};
}
deleteSchemaSums(schema) {
delete this.shaSums[schema];
this.infoChanged = true;
}
async refreshPullDate() {
const [[row]] = await this.conn.query(`SELECT NOW() now`);
this.pullInfo.lastPull = row.now;
this.infoChanged = true;
}
async saveInfo() {
if (!this.infoChanged) return;
await fs.writeFile(this.pullFile,
JSON.stringify(this.pullInfo, null, ' '));
this.infoChanged = false;
}
}

View File

@ -1,13 +1,8 @@
const ejs = require('ejs'); const ejs = require('ejs');
const shajs = require('sha.js');
const fs = require('fs-extra'); const fs = require('fs-extra');
function camelToSnake(str) { module.exports = class Exporter {
return str.replace(/[A-Z]/g, match => `-${match.toLowerCase()}`);
}
class Exporter {
constructor(engine, objectType, conn) { constructor(engine, objectType, conn) {
this.engine = engine; this.engine = engine;
this.objectType = objectType; this.objectType = objectType;
@ -16,7 +11,7 @@ class Exporter {
} }
async init() { async init() {
const templateDir = `${__dirname}/exporters/${this.objectType}`; const templateDir = `${__dirname}/../exporters/${this.objectType}`;
this.sql = await fs.readFile(`${templateDir}.sql`, 'utf8'); this.sql = await fs.readFile(`${templateDir}.sql`, 'utf8');
const templateFile = await fs.readFile(`${templateDir}.ejs`, 'utf8'); const templateFile = await fs.readFile(`${templateDir}.ejs`, 'utf8');
@ -118,114 +113,3 @@ class Exporter {
return this.template(params); return this.template(params);
} }
} }
class ExporterEngine {
constructor(conn, myvcDir) {
this.conn = conn;
this.pullFile = `${myvcDir}/.pullinfo.json`;
this.exporters = [];
this.exporterMap = {};
}
async init () {
if (await fs.pathExists(this.pullFile)) {
this.pullInfo = JSON.parse(await fs.readFile(this.pullFile, 'utf8'));
const lastPull = this.pullInfo.lastPull;
if (lastPull)
this.pullInfo.lastPull = new Date(lastPull);
} else
this.pullInfo = {
lastPull: null,
shaSums: {}
};
this.shaSums = this.pullInfo.shaSums;
this.lastPull = this.pullInfo.lastPull;
this.infoChanged = false;
const types = [
'function',
'procedure',
'view',
'trigger',
'event'
];
for (const type of types) {
const exporter = new Exporter(this, type, this.conn);
await exporter.init();
this.exporters.push(exporter);
this.exporterMap[type] = exporter;
}
}
async fetchRoutine(type, schema, name) {
const exporter = this.exporterMap[type];
const [row] = await exporter.query(schema, name);
return row && exporter.format(row);
}
async fetchShaSum(type, schema, name) {
const sql = await this.fetchRoutine(type, schema, name);
this.setShaSum(type, schema, name, this.shaSum(sql));
}
shaSum(sql) {
if (!sql) return null;
return shajs('sha256')
.update(JSON.stringify(sql))
.digest('hex');
}
getShaSum(type, schema, name) {
try {
return this.shaSums[schema][type][name];
} catch (e) {};
return null;
}
setShaSum(type, schema, name, shaSum) {
if (!shaSum) {
this.deleteShaSum(type, schema, name);
return;
}
const shaSums = this.shaSums;
if (!shaSums[schema])
shaSums[schema] = {};
if (!shaSums[schema][type])
shaSums[schema][type] = {};
shaSums[schema][type][name] = shaSum;
this.infoChanged = true;
}
deleteShaSum(type, schema, name) {
try {
delete this.shaSums[schema][type][name];
this.infoChanged = true;
} catch (e) {};
}
deleteSchemaSums(schema) {
delete this.shaSums[schema];
this.infoChanged = true;
}
async refreshPullDate() {
const [[row]] = await this.conn.query(`SELECT NOW() now`);
this.pullInfo.lastPull = row.now;
this.infoChanged = true;
}
async saveInfo() {
if (!this.infoChanged) return;
await fs.writeFile(this.pullFile,
JSON.stringify(this.pullInfo, null, ' '));
this.infoChanged = false;
}
}
module.exports.camelToSnake = camelToSnake;
module.exports.Exporter = Exporter;
module.exports.ExporterEngine = ExporterEngine;

6
lib/util.js Normal file
View File

@ -0,0 +1,6 @@
/**
 * Converts a camelCase identifier to its dash-separated lowercase form
 * (e.g. `maxOldVersions` -> `max-old-versions`), as used for CLI
 * option names. Note: despite the name, the separator is a dash, not
 * an underscore.
 *
 * @param {String} str The camelCase string to convert
 * @returns {String} The dash-separated lowercase string
 */
function camelToSnake(str) {
    let result = '';
    for (const char of str)
        result += char >= 'A' && char <= 'Z'
            ? `-${char.toLowerCase()}`
            : char;
    return result;
}
module.exports.camelToSnake = camelToSnake;

View File

@ -1,33 +1,29 @@
const MyVC = require('./myvc'); const MyVC = require('./myvc');
const Command = require('./lib/command');
const fs = require('fs-extra'); const fs = require('fs-extra');
/** /**
* Cleans old applied versions. * Cleans old applied versions.
*/ */
class Clean { class Clean extends Command {
get usage() { static usage = {
return {
description: 'Cleans old applied versions' description: 'Cleans old applied versions'
}; };
}
get localOpts() { static localOpts = {
return {
default: { default: {
remote: 'production' remote: 'production'
} }
}; };
}
async run(myvc, opts) { async run(myvc, opts) {
await myvc.dbConnect(); await myvc.dbConnect();
const version = await myvc.fetchDbVersion() || {}; const version = await myvc.fetchDbVersion() || {};
const number = version.number; const number = version.number;
const verionsDir =`${opts.myvcDir}/versions`;
const oldVersions = []; const oldVersions = [];
const versionDirs = await fs.readdir(verionsDir); const versionDirs = await fs.readdir(opts.versionsDir);
for (const versionDir of versionDirs) { for (const versionDir of versionDirs) {
const dirVersion = myvc.parseVersionDir(versionDir); const dirVersion = myvc.parseVersionDir(versionDir);
if (!dirVersion) continue; if (!dirVersion) continue;
@ -41,7 +37,7 @@ class Clean {
oldVersions.splice(-opts.maxOldVersions); oldVersions.splice(-opts.maxOldVersions);
for (const oldVersion of oldVersions) for (const oldVersion of oldVersions)
await fs.remove(`${verionsDir}/${oldVersion}`, await fs.remove(`${opts.versionsDir}/${oldVersion}`,
{recursive: true}); {recursive: true});
console.log(`Old versions deleted: ${oldVersions.length}`); console.log(`Old versions deleted: ${oldVersions.length}`);

View File

@ -1,23 +1,20 @@
const MyVC = require('./myvc'); const MyVC = require('./myvc');
const Command = require('./lib/command');
const fs = require('fs-extra'); const fs = require('fs-extra');
const path = require('path'); const path = require('path');
class Dump { class Dump extends Command {
get usage() { static usage = {
return {
description: 'Dumps structure and fixtures from remote', description: 'Dumps structure and fixtures from remote',
operand: 'remote' operand: 'remote'
}; };
}
get localOpts() { static localOpts = {
return {
default: { default: {
remote: 'production' remote: 'production'
} }
}; };
}
async run(myvc, opts) { async run(myvc, opts) {
const dumpStream = await myvc.initDump('.dump.sql'); const dumpStream = await myvc.initDump('.dump.sql');
@ -33,7 +30,7 @@ class Dump {
'--databases' '--databases'
]; ];
dumpArgs = dumpArgs.concat(opts.schemas); dumpArgs = dumpArgs.concat(opts.schemas);
await myvc.runDump('myvc-dump.sh', dumpArgs, dumpStream); await myvc.runDump('docker-dump.sh', dumpArgs, dumpStream);
console.log('Dumping fixtures.'); console.log('Dumping fixtures.');
await myvc.dumpFixtures(dumpStream, opts.fixtures); await myvc.dumpFixtures(dumpStream, opts.fixtures);
@ -60,9 +57,8 @@ class Dump {
await myvc.dbConnect(); await myvc.dbConnect();
const version = await myvc.fetchDbVersion(); const version = await myvc.fetchDbVersion();
if (version) { if (version) {
const dumpDir = path.join(opts.myvcDir, 'dump');
await fs.writeFile( await fs.writeFile(
`${dumpDir}/.dump.json`, `${opts.dumpDir}/.dump.json`,
JSON.stringify(version) JSON.stringify(version)
); );
} }

View File

@ -1,3 +0,0 @@
#!/bin/bash
mysqldump $@ | sed 's/ AUTO_INCREMENT=[0-9]* //g'

View File

@ -1,25 +1,22 @@
const MyVC = require('./myvc'); const MyVC = require('./myvc');
const Command = require('./lib/command');
class Fixtures { class Fixtures extends Command {
get usage() { static usage = {
return {
description: 'Dumps local fixtures from database', description: 'Dumps local fixtures from database',
operand: 'remote' operand: 'remote'
}; };
}
get localOpts() { static localOpts = {
return {
default: { default: {
remote: 'docker' remote: 'docker'
} }
}; };
}
async run(myvc, opts) { async run(myvc, opts) {
const dumpStream = await myvc.initDump('fixtures.sql'); const dumpStream = await myvc.initDump('fixtures.sql');
await myvc.dumpFixtures(dumpStream, opts.localFixtures); await myvc.dumpFixtures(dumpStream, opts.localFixtures, true);
await dumpStream.end(); await dumpStream.end();
} }
} }

View File

@ -1,13 +1,12 @@
const MyVC = require('./myvc'); const MyVC = require('./myvc');
const Command = require('./lib/command');
const fs = require('fs-extra'); const fs = require('fs-extra');
class Init { class Init extends Command {
get usage() { static usage = {
return {
description: 'Initialize an empty workspace' description: 'Initialize an empty workspace'
}; };
}
async run(myvc, opts) { async run(myvc, opts) {
const templateDir = `${__dirname}/template`; const templateDir = `${__dirname}/template`;

View File

@ -1,11 +1,11 @@
const MyVC = require('./myvc'); const MyVC = require('./myvc');
const Command = require('./lib/command');
const fs = require('fs-extra'); const fs = require('fs-extra');
const nodegit = require('nodegit'); const nodegit = require('nodegit');
const ExporterEngine = require('./lib').ExporterEngine; const ExporterEngine = require('./lib/exporter-engine');
class Pull { class Pull extends Command {
get usage() { static usage = {
return {
description: 'Incorporate database routine changes into workspace', description: 'Incorporate database routine changes into workspace',
params: { params: {
force: 'Do it even if there are local changes', force: 'Do it even if there are local changes',
@ -15,10 +15,8 @@ class Pull {
}, },
operand: 'remote' operand: 'remote'
}; };
}
get localOpts() { static localOpts = {
return {
alias: { alias: {
force: 'f', force: 'f',
checkout: 'c', checkout: 'c',
@ -32,7 +30,6 @@ class Pull {
'sums' 'sums'
] ]
}; };
}
async run(myvc, opts) { async run(myvc, opts) {
const conn = await myvc.dbConnect(); const conn = await myvc.dbConnect();
@ -91,32 +88,32 @@ class Pull {
await engine.init(); await engine.init();
const shaSums = engine.shaSums; const shaSums = engine.shaSums;
const exportDir = `${opts.myvcDir}/routines`; const routinesDir = opts.routinesDir;
if (!await fs.pathExists(exportDir)) if (!await fs.pathExists(routinesDir))
await fs.mkdir(exportDir); await fs.mkdir(routinesDir);
// Delete old schemas // Delete old schemas
const schemas = await fs.readdir(exportDir); const schemas = await fs.readdir(routinesDir);
for (const schema of schemas) { for (const schema of schemas) {
if (opts.schemas.indexOf(schema) == -1) if (opts.schemas.indexOf(schema) == -1)
await fs.remove(`${exportDir}/${schema}`, {recursive: true}); await fs.remove(`${routinesDir}/${schema}`, {recursive: true});
} }
for (const schema in shaSums) { for (const schema in shaSums) {
if (!await fs.pathExists(`${exportDir}/${schema}`)) if (!await fs.pathExists(`${routinesDir}/${schema}`))
engine.deleteSchemaSums(schema); engine.deleteSchemaSums(schema);
} }
// Export objects to SQL files // Export objects to SQL files
for (const schema of opts.schemas) { for (const schema of opts.schemas) {
let schemaDir = `${exportDir}/${schema}`; let schemaDir = `${routinesDir}/${schema}`;
if (!await fs.pathExists(schemaDir)) if (!await fs.pathExists(schemaDir))
await fs.mkdir(schemaDir); await fs.mkdir(schemaDir);
for (const exporter of engine.exporters) for (const exporter of engine.exporters)
await exporter.export(exportDir, await exporter.export(routinesDir,
schema, opts.update, opts.sums); schema, opts.update, opts.sums);
} }

View File

@ -1,15 +1,15 @@
const MyVC = require('./myvc'); const MyVC = require('./myvc');
const Command = require('./lib/command');
const fs = require('fs-extra'); const fs = require('fs-extra');
const nodegit = require('nodegit'); const nodegit = require('nodegit');
const ExporterEngine = require('./lib').ExporterEngine; const ExporterEngine = require('./lib/exporter-engine');
/** /**
* Pushes changes to remote. * Pushes changes to remote.
*/ */
class Push { class Push extends Command {
get usage() { static usage = {
return {
description: 'Apply changes into database', description: 'Apply changes into database',
params: { params: {
force: 'Answer yes to all questions', force: 'Answer yes to all questions',
@ -18,10 +18,8 @@ class Push {
}, },
operand: 'remote' operand: 'remote'
}; };
}
get localOpts() { static localOpts = {
return {
alias: { alias: {
force: 'f', force: 'f',
commit: 'c', commit: 'c',
@ -33,7 +31,6 @@ class Push {
'sums' 'sums'
] ]
}; };
}
async run(myvc, opts) { async run(myvc, opts) {
const conn = await myvc.dbConnect(); const conn = await myvc.dbConnect();
@ -132,7 +129,7 @@ class Push {
let nChanges = 0; let nChanges = 0;
let silent = true; let silent = true;
const versionsDir = `${opts.myvcDir}/versions`; const versionsDir = opts.versionsDir;
function logVersion(version, name, error) { function logVersion(version, name, error) {
console.log('', version.bold, name); console.log('', version.bold, name);
@ -217,7 +214,7 @@ class Push {
let err; let err;
try { try {
await this.queryFromFile(pushConn, await myvc.queryFromFile(pushConn,
`${scriptsDir}/${script}`); `${scriptsDir}/${script}`);
} catch (e) { } catch (e) {
err = e; err = e;
@ -261,9 +258,7 @@ class Push {
const gitExists = await fs.pathExists(`${opts.workspace}/.git`); const gitExists = await fs.pathExists(`${opts.workspace}/.git`);
let nRoutines = 0; let nRoutines = 0;
let changes = gitExists let changes = await myvc.changedRoutines(version.gitCommit);
? await myvc.changedRoutines(version.gitCommit)
: await myvc.cachedChanges();
changes = this.parseChanges(changes); changes = this.parseChanges(changes);
const routines = []; const routines = [];
@ -305,7 +300,7 @@ class Push {
const schema = change.schema; const schema = change.schema;
const name = change.name; const name = change.name;
const type = change.type.name.toLowerCase(); const type = change.type.name.toLowerCase();
const fullPath = `${opts.myvcDir}/routines/${change.path}.sql`; const fullPath = `${opts.routinesDir}/${change.path}.sql`;
const exists = await fs.pathExists(fullPath); const exists = await fs.pathExists(fullPath);
let newSql; let newSql;
@ -333,7 +328,7 @@ class Push {
if (change.type.name === 'VIEW') if (change.type.name === 'VIEW')
await pushConn.query(`USE ${scapedSchema}`); await pushConn.query(`USE ${scapedSchema}`);
await this.multiQuery(pushConn, newSql); await myvc.multiQuery(pushConn, newSql);
if (change.isRoutine) { if (change.isRoutine) {
await conn.query( await conn.query(
@ -415,104 +410,6 @@ class Push {
); );
} }
/**
* Executes a multi-query string.
*
* @param {Connection} conn MySQL connection object
* @param {String} sql SQL multi-query string
* @returns {Array<Result>} The resultset
*/
async multiQuery(conn, sql) {
let results = [];
const stmts = this.querySplit(sql);
for (const stmt of stmts)
results = results.concat(await conn.query(stmt));
return results;
}
/**
* Executes an SQL script.
*
* @param {Connection} conn MySQL connection object
* @returns {Array<Result>} The resultset
*/
async queryFromFile(conn, file) {
const sql = await fs.readFile(file, 'utf8');
return await this.multiQuery(conn, sql);
}
/**
* Splits an SQL muti-query into a single-query array, it does an small
* parse to correctly handle the DELIMITER statement.
*
* @param {Array<String>} stmts The splitted SQL statements
*/
querySplit(sql) {
const stmts = [];
let i,
char,
token,
escaped,
stmtStart;
let delimiter = ';';
const delimiterRe = /\s*delimiter\s+(\S+)[^\S\r\n]*(?:\r?\n|\r|$)/yi;
function begins(str) {
let j;
for (j = 0; j < str.length; j++)
if (sql[i + j] != str[j])
return false;
i += j;
return true;
}
for (i = 0; i < sql.length;) {
stmtStart = i;
delimiterRe.lastIndex = i;
const match = sql.match(delimiterRe);
if (match) {
delimiter = match[1];
i += match[0].length;
continue;
}
let delimiterFound = false;
while (i < sql.length) {
char = sql[i];
if (token) {
if (!escaped && begins(token.end))
token = null;
else {
escaped = !escaped && token.escape(char);
i++;
}
} else {
delimiterFound = begins(delimiter);
if (delimiterFound) break;
const tok = tokenIndex.get(char);
if (tok && begins(tok.start))
token = tok;
else
i++;
}
}
let len = i - stmtStart;
if (delimiterFound) len -= delimiter.length;
const stmt = sql.substr(stmtStart, len);
if (!/^\s*$/.test(stmt))
stmts.push(stmt);
}
return stmts;
}
} }
const typeMap = { const typeMap = {
@ -572,40 +469,6 @@ class Routine {
} }
} }
const tokens = {
string: {
start: '\'',
end: '\'',
escape: char => char == '\'' || char == '\\'
},
quotedString: {
start: '"',
end: '"',
escape: char => char == '"' || char == '\\'
},
id: {
start: '`',
end: '`',
escape: char => char == '`'
},
multiComment: {
start: '/*',
end: '*/',
escape: () => false
},
singleComment: {
start: '-- ',
end: '\n',
escape: () => false
}
};
const tokenIndex = new Map();
for (const tokenId in tokens) {
const token = tokens[tokenId];
tokenIndex.set(token.start[0], token);
}
module.exports = Push; module.exports = Push;
if (require.main === module) if (require.main === module)

View File

@ -1,10 +1,11 @@
const MyVC = require('./myvc'); const MyVC = require('./myvc');
const docker = require('./docker'); const Command = require('./lib/command');
const Container = require('./docker').Container; const Push = require('./myvc-push');
const docker = require('./lib/docker');
const fs = require('fs-extra'); const fs = require('fs-extra');
const path = require('path'); const path = require('path');
const Server = require('./server/server'); const Server = require('./lib/server');
/** /**
* Builds the database image and runs a container. It only rebuilds the * Builds the database image and runs a container. It only rebuilds the
@ -12,19 +13,16 @@ const Server = require('./server/server');
* image was built is different to today. Some workarounds have been used * image was built is different to today. Some workarounds have been used
* to avoid a bug with OverlayFS driver on MacOS. * to avoid a bug with OverlayFS driver on MacOS.
*/ */
class Run { class Run extends Command {
get usage() { static usage = {
return {
description: 'Build and start local database server container', description: 'Build and start local database server container',
params: { params: {
ci: 'Workaround for continuous integration system', ci: 'Workaround for continuous integration system',
random: 'Whether to use a random container name or port' random: 'Whether to use a random container name or port'
} }
}; };
}
get localOpts() { static localOpts = {
return {
alias: { alias: {
ci: 'c', ci: 'c',
random: 'r' random: 'r'
@ -34,43 +32,14 @@ class Run {
'random' 'random'
] ]
}; };
}
async run(myvc, opts) { async run(myvc, opts) {
const dumpDir = `${opts.myvcDir}/dump`; const dumpDir = opts.dumpDir;
const serverDir = path.join(__dirname, 'server'); const serverDir = path.join(__dirname, 'server');
// Fetch dump information
if (!await fs.pathExists(`${dumpDir}/.dump.sql`)) if (!await fs.pathExists(`${dumpDir}/.dump.sql`))
throw new Error('To run local database you have to create a dump first'); throw new Error('To run local database you have to create a dump first');
const dumpInfo = `${dumpDir}/.dump.json`;
if (await fs.pathExists(dumpInfo)) {
const cache = await myvc.cachedChanges();
const version = JSON.parse(
await fs.readFileSync(dumpInfo, 'utf8')
);
const changes = await myvc.changedRoutines(version.gitCommit);
let isEqual = false;
if (cache && changes && cache.length == changes.length)
for (let i = 0; i < changes.length; i++) {
isEqual = cache[i].path == changes[i].path
&& cache[i].mark == changes[i].mark;
if (!isEqual) break;
}
if (!isEqual) {
const fd = await fs.open(`${dumpDir}/.changes`, 'w+');
for (const change of changes)
await fs.write(fd, change.mark + change.path + '\n');
await fs.close(fd);
}
}
// Build base server image // Build base server image
let serverDockerfile = path.join(dumpDir, 'Dockerfile'); let serverDockerfile = path.join(dumpDir, 'Dockerfile');
@ -119,7 +88,7 @@ class Run {
publish: `3306:${dbConfig.port}` publish: `3306:${dbConfig.port}`
}; };
try { try {
const server = new Server(new Container(opts.code)); const server = new Server(new docker.Container(opts.code));
await server.rm(); await server.rm();
} catch (e) {} } catch (e) {}
} }
@ -128,13 +97,14 @@ class Run {
Object.assign(runOptions, null, { Object.assign(runOptions, null, {
env: `RUN_CHOWN=${runChown}`, env: `RUN_CHOWN=${runChown}`,
detach: true detach: true,
volume: `${path.join(dumpDir, 'fixtures.sql')}:/fixtures.sql:ro`
}); });
const ct = await docker.run(opts.code, null, runOptions); const ct = await docker.run(opts.code, null, runOptions);
const server = new Server(ct, dbConfig); const server = new Server(ct, dbConfig);
try {
if (isRandom) { if (isRandom) {
try {
const netSettings = await ct.inspect({ const netSettings = await ct.inspect({
format: '{{json .NetworkSettings}}' format: '{{json .NetworkSettings}}'
}); });
@ -143,14 +113,43 @@ class Run {
dbConfig.host = netSettings.Gateway; dbConfig.host = netSettings.Gateway;
dbConfig.port = netSettings.Ports['3306/tcp'][0].HostPort; dbConfig.port = netSettings.Ports['3306/tcp'][0].HostPort;
}
} catch (err) { } catch (err) {
if (isRandom)
await server.rm(); await server.rm();
throw err; throw err;
} }
}
await server.wait(); await server.wait();
// Apply changes
Object.assign(opts, {
commit: true,
dbConfig
});
await myvc.runCommand(Push, opts);
// Apply fixtures
console.log('Applying fixtures.');
await ct.exec(null,
'docker-import.sh', ['/fixtures'], 'spawn', opts.debug);
// Create triggers
console.log('Creating triggers.');
const conn = await myvc.createConnection();
for (const schema of opts.schemas) {
const triggersPath = `${opts.routinesDir}/${schema}/triggers`;
if (!await fs.pathExists(triggersPath))
continue;
const triggersDir = await fs.readdir(triggersPath);
for (const triggerFile of triggersDir)
await myvc.queryFromFile(conn, `${triggersPath}/${triggerFile}`);
}
return server; return server;
} }
} }

View File

@ -1,7 +1,8 @@
const MyVC = require('./myvc'); const MyVC = require('./myvc');
const Container = require('./docker').Container; const Command = require('./lib/command');
const Server = require('./server/server'); const Container = require('./lib/docker').Container;
const Server = require('./lib/server');
const Run = require('./myvc-run'); const Run = require('./myvc-run');
/** /**
@ -10,12 +11,10 @@ const Run = require('./myvc-run');
* mind that when you do not rebuild the docker you may be using an outdated * mind that when you do not rebuild the docker you may be using an outdated
* version of it. * version of it.
*/ */
class Start { class Start extends Command {
get usage() { static usage = {
return {
description: 'Start local database server container' description: 'Start local database server container'
}; };
}
async run(myvc, opts) { async run(myvc, opts) {
const ct = new Container(opts.code); const ct = new Container(opts.code);

View File

@ -1,23 +1,21 @@
const MyVC = require('./myvc'); const MyVC = require('./myvc');
const Command = require('./lib/command');
const fs = require('fs-extra'); const fs = require('fs-extra');
/** /**
* Creates a new version. * Creates a new version.
*/ */
class Version { class Version extends Command {
get usage() { static usage = {
return {
description: 'Creates a new version', description: 'Creates a new version',
params: { params: {
name: 'Name for the new version' name: 'Name for the new version'
}, },
operand: 'name' operand: 'name'
}; };
}
get localOpts() { static localOpts = {
return {
alias: { alias: {
name: 'n' name: 'n'
}, },
@ -28,11 +26,9 @@ class Version {
remote: 'production' remote: 'production'
} }
}; };
}
async run(myvc, opts) { async run(myvc, opts) {
let newVersionDir; let newVersionDir;
const verionsDir =`${opts.myvcDir}/versions`;
// Fetch last version number // Fetch last version number
@ -77,7 +73,7 @@ class Version {
let versionName = opts.name; let versionName = opts.name;
const versionNames = new Set(); const versionNames = new Set();
const versionDirs = await fs.readdir(verionsDir); const versionDirs = await fs.readdir(opts.versionsDir);
for (const versionDir of versionDirs) { for (const versionDir of versionDirs) {
const dirVersion = myvc.parseVersionDir(versionDir); const dirVersion = myvc.parseVersionDir(versionDir);
if (!dirVersion) continue; if (!dirVersion) continue;
@ -107,7 +103,7 @@ class Version {
// Create version // Create version
const versionFolder = `${newVersion}-${versionName}`; const versionFolder = `${newVersion}-${versionName}`;
newVersionDir = `${verionsDir}/${versionFolder}`; newVersionDir = `${opts.versionsDir}/${versionFolder}`;
await conn.query( await conn.query(
`INSERT INTO version `INSERT INTO version

256
myvc.js
View File

@ -9,17 +9,13 @@ const ini = require('ini');
const path = require('path'); const path = require('path');
const mysql = require('mysql2/promise'); const mysql = require('mysql2/promise');
const nodegit = require('nodegit'); const nodegit = require('nodegit');
const camelToSnake = require('./lib').camelToSnake; const camelToSnake = require('./lib/util').camelToSnake;
const docker = require('./docker'); const docker = require('./lib/docker');
const Command = require('./lib/command');
class MyVC { class MyVC {
async run(command) { get usage() {
console.log( return {
'MyVC (MySQL Version Control)'.green,
`v${packageJson.version}`.magenta
);
const usage = {
description: 'Utility for database versioning', description: 'Utility for database versioning',
params: { params: {
remote: 'Name of remote to use', remote: 'Name of remote to use',
@ -30,7 +26,10 @@ class MyVC {
help: 'Display this help message' help: 'Display this help message'
} }
}; };
const baseOpts = { }
get localOpts() {
return {
alias: { alias: {
remote: 'r', remote: 'r',
workspace: 'w', workspace: 'w',
@ -48,6 +47,15 @@ class MyVC {
workspace: process.cwd() workspace: process.cwd()
} }
}; };
}
async run(CommandClass) {
console.log(
'MyVC (MySQL Version Control)'.green,
`v${packageJson.version}`.magenta
);
const baseOpts = this.localOpts;
const opts = this.getopts(baseOpts); const opts = this.getopts(baseOpts);
if (opts.debug) { if (opts.debug) {
@ -60,37 +68,28 @@ class MyVC {
try { try {
const commandName = opts._[0]; const commandName = opts._[0];
if (!command && commandName) { if (!CommandClass && commandName) {
const commands = [ if (!/^[a-z]+$/.test(commandName))
'init', throw new Error (`Invalid command name '${commandName}'`);
'pull',
'push',
'version',
'clean',
'dump',
'fixtures',
'start',
'run'
];
if (commands.indexOf(commandName) == -1) const commandFile = path.join(__dirname, `myvc-${commandName}.js`);
if (!await fs.pathExists(commandFile))
throw new Error (`Unknown command '${commandName}'`); throw new Error (`Unknown command '${commandName}'`);
CommandClass = require(commandFile);
const Klass = require(`./myvc-${commandName}`);
command = new Klass();
} }
if (!command) { if (!CommandClass) {
this.showHelp(baseOpts, usage); this.showHelp(baseOpts, this.usage);
process.exit(0); process.exit(0);
} }
const allOpts = Object.assign({}, baseOpts); const allOpts = Object.assign({}, baseOpts);
if (command.localOpts) if (CommandClass.localOpts)
for (const key in command.localOpts) { for (const key in CommandClass.localOpts) {
const baseValue = baseOpts[key]; const baseValue = baseOpts[key];
const cmdValue = command.localOpts[key]; const cmdValue = CommandClass.localOpts[key];
if (Array.isArray(baseValue)) if (Array.isArray(baseValue))
allOpts[key] = baseValue.concat(cmdValue); allOpts[key] = baseValue.concat(cmdValue);
else if (typeof baseValue == 'object') else if (typeof baseValue == 'object')
@ -104,7 +103,7 @@ class MyVC {
console.log('Command options:'.magenta, commandOpts); console.log('Command options:'.magenta, commandOpts);
Object.assign(opts, commandOpts); Object.assign(opts, commandOpts);
const operandToOpt = command.usage.operand; const operandToOpt = CommandClass.usage.operand;
if (opts._.length >= 2 && operandToOpt) if (opts._.length >= 2 && operandToOpt)
opts[operandToOpt] = opts._[1]; opts[operandToOpt] = opts._[1];
@ -112,7 +111,7 @@ class MyVC {
console.log('Final options:'.magenta, opts); console.log('Final options:'.magenta, opts);
if (opts.help) { if (opts.help) {
this.showHelp(command.localOpts, command.usage, commandName); this.showHelp(CommandClass.localOpts, CommandClass.usage, commandName);
process.exit(0); process.exit(0);
} }
@ -151,8 +150,7 @@ class MyVC {
parameter('Remote:', opts.remote || 'local'); parameter('Remote:', opts.remote || 'local');
await this.load(opts); await this.load(opts);
command.opts = opts; await this.runCommand(CommandClass, opts);
await command.run(this, opts);
await this.unload(); await this.unload();
} catch (err) { } catch (err) {
if (err.name == 'Error' && !opts.debug) { if (err.name == 'Error' && !opts.debug) {
@ -171,10 +169,16 @@ class MyVC {
process.exit(); process.exit();
} }
async runCommand(CommandClass, opts) {
const command = new CommandClass();
command.opts = opts;
await command.run(this, opts);
}
async load(opts) { async load(opts) {
// Configuration file // Configuration file
const config = require(`${__dirname}/myvc.default.yml`); const config = require(`${__dirname}/assets/myvc.default.yml`);
const configFile = 'myvc.config.yml'; const configFile = 'myvc.config.yml';
const configPath = path.join(opts.workspace, configFile); const configPath = path.join(opts.workspace, configFile);
@ -187,9 +191,13 @@ class MyVC {
if (!opts.myvcDir) if (!opts.myvcDir)
opts.myvcDir = path.join(opts.workspace, opts.subdir || ''); opts.myvcDir = path.join(opts.workspace, opts.subdir || '');
opts.routinesDir = path.join(opts.myvcDir, 'routines');
opts.versionsDir = path.join(opts.myvcDir, 'versions');
opts.dumpDir = path.join(opts.myvcDir, 'dump');
// Database configuration // Database configuration
let iniDir = __dirname; let iniDir = path.join(__dirname, 'assets');
let iniFile = 'db.ini'; let iniFile = 'db.ini';
if (opts.remote) { if (opts.remote) {
@ -277,7 +285,7 @@ class MyVC {
if (!res.tableExists) { if (!res.tableExists) {
const structure = await fs.readFile( const structure = await fs.readFile(
`${__dirname}/structure.sql`, 'utf8'); `${__dirname}/assets/structure.sql`, 'utf8');
await conn.query(structure); await conn.query(structure);
} }
} }
@ -402,32 +410,8 @@ class MyVC {
}); });
} }
async cachedChanges() {
const dumpDir = path.join(this.opts.myvcDir, 'dump');
const dumpChanges = path.join(dumpDir, '.changes');
if (!await fs.pathExists(dumpChanges))
return null;
const readline = require('readline');
const rl = readline.createInterface({
input: fs.createReadStream(dumpChanges),
//output: process.stdout,
console: false
});
const changes = [];
for await (const line of rl) {
changes.push({
mark: line.charAt(0),
path: line.substring(1)
});
}
return changes;
}
async initDump(dumpFile) { async initDump(dumpFile) {
const dumpDir = path.join(this.opts.myvcDir, 'dump'); const dumpDir = this.opts.dumpDir;
if (!await fs.pathExists(dumpDir)) if (!await fs.pathExists(dumpDir))
await fs.mkdir(dumpDir); await fs.mkdir(dumpDir);
@ -445,12 +429,21 @@ class MyVC {
return dumpStream; return dumpStream;
} }
async dumpFixtures(dumpStream, tables) { async dumpFixtures(dumpStream, tables, replace) {
const fixturesArgs = [ const fixturesArgs = [
'--no-create-info', '--no-create-info',
'--skip-triggers', '--skip-triggers',
'--insert-ignore' '--skip-extended-insert',
'--skip-disable-keys',
'--skip-add-locks',
'--skip-set-charset',
'--skip-comments',
'--skip-tz-utc'
]; ];
if (replace)
fixturesArgs.push('--replace');
for (const schema in tables) { for (const schema in tables) {
const escapedSchema = '`'+ schema.replace('`', '``') +'`'; const escapedSchema = '`'+ schema.replace('`', '``') +'`';
await dumpStream.write( await dumpStream.write(
@ -512,6 +505,139 @@ class MyVC {
} }
} }
} }
/**
 * Executes an SQL script read from a file.
 *
 * @param {Connection} conn MySQL connection object
 * @param {String} file Path of the SQL file to execute
 * @returns {Array<Result>} The resultset
 */
async queryFromFile(conn, file) {
    const sql = await fs.readFile(file, 'utf8');
    return await this.multiQuery(conn, sql);
}
/**
* Executes a multi-query string.
*
* @param {Connection} conn MySQL connection object
* @param {String} sql SQL multi-query string
* @returns {Array<Result>} The resultset
*/
async multiQuery(conn, sql) {
let results = [];
const stmts = this.querySplit(sql);
for (const stmt of stmts)
results = results.concat(await conn.query(stmt));
return results;
}
/**
 * Splits an SQL multi-query into a single-query array. It does a small
 * parse to correctly handle the DELIMITER statement and to avoid
 * splitting inside strings, quoted identifiers and comments.
 *
 * @param {String} sql SQL multi-query string
 * @returns {Array<String>} The split SQL statements, delimiter excluded
 */
querySplit(sql) {
    const stmts = [];
    let i,
        char,
        token,
        escaped,
        stmtStart;

    let delimiter = ';';
    // Sticky (y) regex: matched manually at position i via lastIndex.
    const delimiterRe = /\s*delimiter\s+(\S+)[^\S\r\n]*(?:\r?\n|\r|$)/yi;

    // Returns true when sql continues with str at position i, and in that
    // case advances i past it; on a failed match i is left untouched.
    function begins(str) {
        let j;
        for (j = 0; j < str.length; j++)
            if (sql[i + j] != str[j])
                return false;
        i += j;
        return true;
    }

    for (i = 0; i < sql.length;) {
        stmtStart = i;

        // A DELIMITER statement changes the active delimiter and is not
        // emitted as a statement itself.
        delimiterRe.lastIndex = i;
        const match = sql.match(delimiterRe);
        if (match) {
            delimiter = match[1];
            i += match[0].length;
            continue;
        }

        let delimiterFound = false;
        while (i < sql.length) {
            char = sql[i];

            if (token) {
                // Inside a string/identifier/comment: only an unescaped
                // end sequence closes it.
                if (!escaped && begins(token.end))
                    token = null;
                else {
                    escaped = !escaped && token.escape(char);
                    i++;
                }
            } else {
                delimiterFound = begins(delimiter);
                if (delimiterFound) break;

                const tok = tokenIndex.get(char);
                if (tok && begins(tok.start))
                    token = tok;
                else
                    i++;
            }
        }

        // begins() advanced i past the delimiter, so strip it from the
        // statement length.
        let len = i - stmtStart;
        if (delimiterFound) len -= delimiter.length;

        const stmt = sql.substr(stmtStart, len);
        // Discard whitespace-only fragments between delimiters.
        if (!/^\s*$/.test(stmt))
            stmts.push(stmt);
    }

    return stmts;
}
}
/**
 * Lexical tokens that must be skipped while scanning for the statement
 * delimiter: strings, quoted identifiers and comments. Each token has a
 * start/end sequence and an escape() predicate for characters that may
 * escape its end sequence.
 */
const tokens = {
    string: {
        start: '\'',
        end: '\'',
        escape: char => char == '\'' || char == '\\'
    },
    quotedString: {
        start: '"',
        end: '"',
        escape: char => char == '"' || char == '\\'
    },
    id: {
        start: '`',
        end: '`',
        escape: char => char == '`'
    },
    multiComment: {
        start: '/*',
        end: '*/',
        escape: () => false
    },
    singleComment: {
        start: '-- ',
        end: '\n',
        escape: () => false
    }
};

// Index tokens by their first character for quick lookup while scanning.
const tokenIndex = new Map();
for (const token of Object.values(tokens))
    tokenIndex.set(token.start[0], token);
module.exports = MyVC; module.exports = MyVC;

4
package-lock.json generated
View File

@ -1,12 +1,12 @@
{ {
"name": "myvc", "name": "myvc",
"version": "1.4.19", "version": "1.5.0",
"lockfileVersion": 2, "lockfileVersion": 2,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "myvc", "name": "myvc",
"version": "1.4.19", "version": "1.5.0",
"license": "GPL-3.0", "license": "GPL-3.0",
"dependencies": { "dependencies": {
"@sqltools/formatter": "^1.2.3", "@sqltools/formatter": "^1.2.3",

View File

@ -1,6 +1,6 @@
{ {
"name": "myvc", "name": "myvc",
"version": "1.4.19", "version": "1.5.0",
"author": "Verdnatura Levante SL", "author": "Verdnatura Levante SL",
"description": "MySQL Version Control", "description": "MySQL Version Control",
"license": "GPL-3.0", "license": "GPL-3.0",

View File

@ -7,5 +7,8 @@ RUN apt-get update \
libmariadb3 \ libmariadb3 \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
COPY myvc-dump.sh /usr/local/bin/ COPY \
server/docker-dump.sh \
server/docker-fixtures.sh \
/usr/local/bin/
WORKDIR /workspace WORKDIR /workspace

View File

@ -7,21 +7,9 @@ COPY \
dump/beforeDump.sql \ dump/beforeDump.sql \
dump/afterDump.sql \ dump/afterDump.sql \
dump/ dump/
COPY myvc.config.yml \
./
RUN gosu mysql docker-init.sh RUN gosu mysql docker-init.sh
COPY routines routines
COPY versions versions
COPY \
dump/fixtures.sql \
dump/.changes \
dump/
ARG STAMP=unknown
RUN gosu mysql docker-push.sh
RUN echo "[LOG] Import finished." \ RUN echo "[LOG] Import finished." \
&& rm -rf /workspace && rm -rf /workspace

View File

@ -3,44 +3,15 @@ FROM myvc/base
USER root USER root
ENV MYSQL_ROOT_PASSWORD root ENV MYSQL_ROOT_PASSWORD root
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
curl \
&& curl -sL https://deb.nodesource.com/setup_14.x | bash - \
&& apt-get install -y --no-install-recommends \
nodejs \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir /mysql-data \ RUN mkdir /mysql-data \
&& chown -R mysql:mysql /mysql-data && chown -R mysql:mysql /mysql-data
WORKDIR /myvc
COPY \
package.json \
./
RUN npm install --only=prod
COPY \
structure.sql \
myvc.js \
myvc-push.js \
lib.js \
docker.js \
myvc.default.yml \
db.ini \
./
COPY exporters exporters
RUN ln -s /myvc/myvc.js /usr/local/bin/myvc
WORKDIR /workspace WORKDIR /workspace
COPY server/docker.cnf /etc/mysql/conf.d/ COPY server/docker.cnf /etc/mysql/conf.d/
COPY \ COPY \
server/docker-init.sh \ server/docker-init.sh \
server/docker-push.sh \ server/docker-import.sh \
server/docker-dump.sh \
server/docker-start.sh \ server/docker-start.sh \
/usr/local/bin/ /usr/local/bin/

View File

@ -1,9 +1,4 @@
#!/bin/bash #!/bin/bash
FILE="$1.sql" # FIXME: It can corrupt data
mysqldump $@ | sed 's/ AUTO_INCREMENT=[0-9]* //g'
#if [ -f "$FILE" ]; then
echo "[LOG] -> Importing $FILE"
export MYSQL_PWD=root
mysql -u root --default-character-set=utf8 --comments -f < "$FILE"
#fi

4
server/docker-fixtures.sh Executable file
View File

@ -0,0 +1,4 @@
#!/bin/bash
# Dumps fixture data, putting each row on its own line for readable diffs.
# "$@" (quoted) forwards every argument verbatim; the unquoted $@ form
# word-split arguments containing spaces.
# FIXME: It can corrupt data
mysqldump "$@" | sed -E 's/(VALUES |\),)\(/\1\n\t\(/g'

6
server/docker-import.sh Executable file
View File

@ -0,0 +1,6 @@
#!/bin/bash
# Imports an SQL file into the local MySQL server as root.
# Usage: docker-import.sh <file-path-without-.sql-extension>
FILE="$1.sql"
echo "[LOG] -> Importing $FILE"
export MYSQL_PWD=root
mysql -u root --default-character-set=utf8 --comments -f < "$FILE"

View File

@ -13,8 +13,8 @@ docker_temp_server_start "$CMD"
docker_setup_db docker_setup_db
docker_process_init_files /docker-entrypoint-initdb.d/* docker_process_init_files /docker-entrypoint-initdb.d/*
docker-dump.sh dump/beforeDump docker-import.sh dump/beforeDump
docker-dump.sh dump/.dump docker-import.sh dump/.dump
docker-dump.sh dump/afterDump docker-import.sh dump/afterDump
docker_temp_server_stop docker_temp_server_stop

View File

@ -1,12 +0,0 @@
#!/bin/bash
# NOTE(review): appears to start a temporary MySQL server using the official
# entrypoint helpers, apply pending changes via `myvc push`, import fixtures,
# then stop the server — confirm against the myvc image build.
. /usr/local/bin/docker-entrypoint.sh
CMD=mysqld
docker_setup_env "$CMD"
docker_temp_server_start "$CMD"
myvc push --socket --commit
docker-dump.sh dump/fixtures
docker_temp_server_stop

View File

@ -8,6 +8,6 @@
"type": "git" "type": "git"
}, },
"dependencies": { "dependencies": {
"myvc": "^1.4.19" "myvc": "^1.5.0"
} }
} }