refs #4036 Myt: class code clean/refactor, template fix
parent 0cb3ec8573
commit 9f4a01ad97
@@ -2,12 +2,9 @@
 * Base class for Myt commands.
 */
module.exports = class MytCommand {
    get usage() {
        return {};
    }

    get localOpts() {
        return {};
    constructor(myt, opts) {
        this.myt = myt;
        this.opts = opts;
    }

    async run(myt, opts) {
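Note: a minimal sketch of what this refactor means for a command subclass. The Greet class, its description and the 'name' default below are hypothetical; the static usage/opts members and the (myt, opts) constructor are the pattern this diff introduces.

// Hypothetical command following the refactored MytCommand base class.
const Command = require('./lib/command');

class Greet extends Command {
    // Replaces the old `get usage()` instance getter.
    static usage = {
        description: 'Prints a greeting',
        operand: 'name'
    };

    // Replaces the old `get localOpts()` getter, now simply named `opts`.
    static opts = {
        default: {
            name: 'world'
        }
    };

    async run(myt, opts) {
        // this.myt and this.opts are set by the base constructor.
        console.log(`Hello, ${opts.name}!`);
    }
}

module.exports = Greet;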
@@ -0,0 +1,141 @@
const fs = require('fs-extra');

/**
 * Executes an SQL script.
 *
 * @param {Connection} conn MySQL connection object
 * @returns {Array<Result>} The resultset
 */
async function queryFromFile(conn, file) {
    const sql = await fs.readFile(file, 'utf8');
    return await this.multiQuery(conn, sql);
}

/**
 * Executes a multi-query string.
 *
 * @param {Connection} conn MySQL connection object
 * @param {String} sql SQL multi-query string
 * @returns {Array<Result>} The resultset
 */
async function multiQuery(conn, sql) {
    let results = [];
    const stmts = this.querySplit(sql);

    for (const stmt of stmts)
        results = results.concat(await conn.query(stmt));

    return results;
}

/**
 * Splits an SQL multi-query into an array of single queries. It does a small
 * parse to correctly handle the DELIMITER statement.
 *
 * @param {String} sql The SQL multi-query string
 * @returns {Array<String>} The split SQL statements
 */
function querySplit(sql) {
    const stmts = [];
    let i,
        char,
        token,
        escaped,
        stmtStart;

    let delimiter = ';';
    const delimiterRe = /\s*delimiter\s+(\S+)[^\S\r\n]*(?:\r?\n|\r|$)/yi;

    function begins(str) {
        let j;
        for (j = 0; j < str.length; j++)
            if (sql[i + j] != str[j])
                return false;
        i += j;
        return true;
    }

    for (i = 0; i < sql.length;) {
        stmtStart = i;

        delimiterRe.lastIndex = i;
        const match = sql.match(delimiterRe);
        if (match) {
            delimiter = match[1];
            i += match[0].length;
            continue;
        }

        let delimiterFound = false;
        while (i < sql.length) {
            char = sql[i];

            if (token) {
                if (!escaped && begins(token.end))
                    token = null;
                else {
                    escaped = !escaped && token.escape(char);
                    i++;
                }
            } else {
                delimiterFound = begins(delimiter);
                if (delimiterFound) break;

                const tok = tokenIndex.get(char);
                if (tok && begins(tok.start))
                    token = tok;
                else
                    i++;
            }
        }

        let len = i - stmtStart;
        if (delimiterFound) len -= delimiter.length;
        const stmt = sql.substr(stmtStart, len);

        if (!/^\s*$/.test(stmt))
            stmts.push(stmt);
    }

    return stmts;
}

const tokens = {
    string: {
        start: '\'',
        end: '\'',
        escape: char => char == '\'' || char == '\\'
    },
    quotedString: {
        start: '"',
        end: '"',
        escape: char => char == '"' || char == '\\'
    },
    id: {
        start: '`',
        end: '`',
        escape: char => char == '`'
    },
    multiComment: {
        start: '/*',
        end: '*/',
        escape: () => false
    },
    singleComment: {
        start: '-- ',
        end: '\n',
        escape: () => false
    }
};

const tokenIndex = new Map();
for (const tokenId in tokens) {
    const token = tokens[tokenId];
    tokenIndex.set(token.start[0], token);
}

module.exports = {
    queryFromFile,
    multiQuery,
    querySplit,
    tokens
};
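Note: a minimal usage sketch of the helpers extracted into lib/conn.js, assuming a mysql2/promise connection; the connection settings and SQL strings below are hypothetical.

const mysql = require('mysql2/promise');
const connExt = require('./lib/conn');

async function example() {
    // Hypothetical connection settings.
    const conn = await mysql.createConnection({host: 'localhost', user: 'root'});

    // Run a whole script file (delegates to multiQuery internally).
    await connExt.queryFromFile(conn, './dump/structure.sql');

    // Split and run an inline multi-query, honoring the DELIMITER statement.
    await connExt.multiQuery(conn, 'SELECT 1;\nDELIMITER $$\nSELECT 2$$');

    // Or just inspect how the splitter cuts a string.
    console.log(connExt.querySplit('SELECT 1; SELECT 2;')); // ['SELECT 1', ' SELECT 2']

    await conn.end();
}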
@@ -0,0 +1,83 @@
const docker = require('./docker');
const fs = require('fs-extra');
const path = require('path');

module.exports = class Dumper {
    constructor(opts) {
        this.opts = opts;
    }

    async init(dumpFile) {
        const dumpDir = this.opts.dumpDir;
        if (!await fs.pathExists(dumpDir))
            await fs.mkdir(dumpDir);

        const dumpPath = path.join(dumpDir, dumpFile);

        // FIXME: If it's called after docker.build() statement it creates an
        // "invalid" WriteStream
        const dumpStream = await fs.createWriteStream(dumpPath);

        const buidDir = path.join(__dirname, '..',)
        await docker.build(buidDir, {
            tag: 'myt/client',
            file: path.join(buidDir, 'server', 'Dockerfile.client')
        }, this.opts.debug);

        this.dumpStream = dumpStream;
    }

    async use(schema) {
        const escapedSchema = '`'+ schema.replace('`', '``') +'`';
        await this.dumpStream.write(
            `USE ${escapedSchema};\n`,
            'utf8'
        );
    }

    async dumpFixtures(tables, replace) {
        const fixturesArgs = [
            '--no-create-info',
            '--skip-triggers',
            '--skip-extended-insert',
            '--skip-disable-keys',
            '--skip-add-locks',
            '--skip-set-charset',
            '--skip-comments',
            '--skip-tz-utc'
        ];

        if (replace)
            fixturesArgs.push('--replace');

        for (const schema in tables) {
            await this.use(schema);
            const args = fixturesArgs.concat([schema], tables[schema]);
            await this.runDump('mysqldump', args, this.dumpStream);
        }
    }

    async runDump(command, args) {
        const iniPath = path.join(this.opts.subdir || '', 'remotes', this.opts.iniFile);
        const myArgs = [
            `--defaults-file=${iniPath}`
        ];
        const execOptions = {
            stdio: [
                process.stdin,
                this.dumpStream,
                process.stderr
            ]
        };
        const commandArgs = [command].concat(myArgs, args);
        await docker.run('myt/client', commandArgs, {
            addHost: 'host.docker.internal:host-gateway',
            volume: `${this.opts.mytDir}:/workspace`,
            rm: true
        }, execOptions);
    }

    async end() {
        await this.dumpStream.end();
    }
}
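Note: a usage sketch of the new Dumper class, mirroring what myt-dump.js does further down in this diff; the opts values and the schema/table names are placeholders.

const Dumper = require('./lib/dumper');

async function example(opts) {
    // opts is expected to carry dumpDir, mytDir, iniFile, subdir and debug,
    // as resolved by Myt's option parsing.
    const dumper = new Dumper(opts);
    await dumper.init('.dump.sql');       // builds myt/client and opens the dump stream

    await dumper.runDump('docker-dump.sh', ['--databases', 'myDb']); // structure
    await dumper.dumpFixtures({myDb: ['myTable']}, false);           // fixture rows
    await dumper.use('mysql');                                       // switch schema in the dump
    await dumper.runDump('mysqldump', ['mysql', 'user']);            // e.g. privilege tables
    await dumper.end();
}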
@@ -0,0 +1,29 @@
const nodegit = require('nodegit');

async function getStaged(repo) {
    const head = await repo.getHeadCommit();

    try {
        const emptyTree = '4b825dc642cb6eb9a060e54bf8d69288fbee4904';
        const headTree = await (head
            ? head.getTree()
            : nodegit.Tree.lookup(repo, emptyTree)
        );
        return await nodegit.Diff.treeToIndex(repo, headTree, null);
    } catch (err) {
        console.warn('Cannot fetch staged changes:', err.message);
    }
}

async function getUnstaged(repo) {
    const Diff = nodegit.Diff;
    return await Diff.indexToWorkdir(repo, null, {
        flags: Diff.OPTION.SHOW_UNTRACKED_CONTENT
            | Diff.OPTION.RECURSE_UNTRACKED_DIRS
    });
}

module.exports = {
    getStaged,
    getUnstaged
};
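Note: a sketch of how the extracted repo helpers are meant to be used; the hasChanges check is paraphrased from myt-pull.js (not shown in this hunk) and the workspace path is hypothetical.

const nodegit = require('nodegit');
const repoExt = require('./lib/repo');

async function example() {
    const repo = await nodegit.Repository.open('/path/to/workspace');

    const unstagedDiff = await repoExt.getUnstaged(repo);
    const stagedDiff = await repoExt.getStaged(repo);

    // A diff with at least one patch means there are pending changes.
    const hasChanges = async diff => diff && (await diff.patches()).length > 0;

    if (await hasChanges(unstagedDiff) || await hasChanges(stagedDiff))
        console.log('You have local changes, save them before pull.');
}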
@@ -1,6 +1,7 @@

function camelToSnake(str) {
    return str.replace(/[A-Z]/g, match => `-${match.toLowerCase()}`);
}

module.exports.camelToSnake = camelToSnake;
module.exports = {
    camelToSnake
};
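Note: for reference, the helper's behavior as implied by the regex above (hypothetical call sites):

const {camelToSnake} = require('./lib/util');
camelToSnake('dumpDir');   // => 'dump-dir'
camelToSnake('workspace'); // => 'workspace' (no uppercase letters, returned unchanged)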
@@ -1,4 +1,3 @@

const Myt = require('./myt');
const Command = require('./lib/command');
const fs = require('fs-extra');
@@ -11,7 +10,7 @@ class Clean extends Command {
        description: 'Cleans old applied versions'
    };

    static localOpts = {
    static opts = {
        default: {
            remote: 'production'
        }
myt-dump.js
@@ -1,7 +1,7 @@

const Myt = require('./myt');
const Command = require('./lib/command');
const fs = require('fs-extra');
const Dumper = require('./lib/dumper');

class Dump extends Command {
    static usage = {
@@ -9,14 +9,15 @@ class Dump extends Command {
        operand: 'remote'
    };

    static localOpts = {
    static opts = {
        default: {
            remote: 'production'
        }
    };

    async run(myt, opts) {
        const dumpStream = await myt.initDump('.dump.sql');
        const dumper = new Dumper(opts);
        await dumper.init('.dump.sql');

        console.log('Dumping structure.');
        let dumpArgs = [
@@ -29,10 +30,10 @@ class Dump extends Command {
            '--databases'
        ];
        dumpArgs = dumpArgs.concat(opts.schemas);
        await myt.runDump('docker-dump.sh', dumpArgs, dumpStream);
        await dumper.runDump('docker-dump.sh', dumpArgs);

        console.log('Dumping fixtures.');
        await myt.dumpFixtures(dumpStream, opts.fixtures);
        await dumper.dumpFixtures(opts.fixtures);

        console.log('Dumping privileges.');
        const privs = opts.privileges;
@@ -46,11 +47,11 @@ class Dump extends Command {
            if (privs.where) args.push('--where', privs.where);
            args = args.concat(['mysql'], privs.tables);

            await dumpStream.write('USE `mysql`;\n', 'utf8');
            await myt.runDump('mysqldump', args, dumpStream);
            await dumper.use('mysql');
            await dumper.runDump('mysqldump', args);
        }

        await dumpStream.end();
        await dumper.end();

        console.log('Saving version.');
        await myt.dbConnect();
@@ -1,6 +1,6 @@

const Myt = require('./myt');
const Command = require('./lib/command');
const Dumper = require('./lib/dumper');

class Fixtures extends Command {
    static usage = {
@@ -8,16 +8,17 @@ class Fixtures extends Command {
        operand: 'remote'
    };

    static localOpts = {
    static opts = {
        default: {
            remote: 'docker'
        }
    };

    async run(myt, opts) {
        const dumpStream = await myt.initDump('fixtures.sql');
        await myt.dumpFixtures(dumpStream, opts.localFixtures, true);
        await dumpStream.end();
        const dumper = new Dumper(opts);
        await dumper.init('fixtures.sql');
        await dumper.dumpFixtures(opts.localFixtures, true);
        await dumper.end();
    }
}
@@ -1,4 +1,3 @@

const Myt = require('./myt');
const Command = require('./lib/command');
const fs = require('fs-extra');
@@ -1,9 +1,9 @@

const Myt = require('./myt');
const Command = require('./lib/command');
const fs = require('fs-extra');
const nodegit = require('nodegit');
const ExporterEngine = require('./lib/exporter-engine');
const repoExt = require('./lib/repo');

class Pull extends Command {
    static usage = {
@@ -17,7 +17,7 @@ class Pull extends Command {
        operand: 'remote'
    };

    static localOpts = {
    static opts = {
        alias: {
            force: 'f',
            checkout: 'c',
@@ -52,14 +52,14 @@ class Pull extends Command {

        // Check for unstaged changes

        const unstagedDiff = await myt.getUnstaged(repo);
        const unstagedDiff = await repoExt.getUnstaged(repo);

        if (await hasChanges(unstagedDiff))
            throw new Error('You have unstaged changes, save them before pull');

        // Check for staged changes

        const stagedDiff = await myt.getStaged(repo);
        const stagedDiff = await repoExt.getStaged(repo);

        if (await hasChanges(stagedDiff))
            throw new Error('You have staged changes, save them before pull');
myt-push.js
@@ -1,9 +1,10 @@

const Myt = require('./myt');
const Command = require('./lib/command');
const fs = require('fs-extra');
const nodegit = require('nodegit');
const ExporterEngine = require('./lib/exporter-engine');
const connExt = require('./lib/conn');
const repoExt = require('./lib/repo');

/**
 * Pushes changes to remote.
@@ -20,7 +21,7 @@ class Push extends Command {
        operand: 'remote'
    };

    static localOpts = {
    static opts = {
        alias: {
            force: 'f',
            commit: 'c',
@@ -217,7 +218,7 @@ class Push extends Command {

            let err;
            try {
                await myt.queryFromFile(pushConn,
                await connExt.queryFromFile(pushConn,
                    `${scriptsDir}/${script}`);
            } catch (e) {
                err = e;
@@ -261,7 +262,7 @@ class Push extends Command {
        const gitExists = await fs.pathExists(`${opts.workspace}/.git`);

        let nRoutines = 0;
        let changes = await myt.changedRoutines(version.gitCommit);
        let changes = await this.changedRoutines(version.gitCommit);
        changes = this.parseChanges(changes);

        const routines = [];
@@ -334,7 +335,7 @@ class Push extends Command {
            if (change.type.name === 'VIEW')
                await pushConn.query(`USE ${scapedSchema}`);

            await myt.multiQuery(pushConn, newSql);
            await connExt.multiQuery(pushConn, newSql);

            if (change.isRoutine) {
                await conn.query(
@@ -416,6 +417,68 @@ class Push extends Command {
        );
    }

    async changedRoutines(commitSha) {
        const repo = await this.myt.openRepo();
        const changes = [];
        const changesMap = new Map();

        async function pushChanges(diff) {
            if (!diff) return;
            const patches = await diff.patches();

            for (const patch of patches) {
                const path = patch.newFile().path();
                const match = path.match(/^routines\/(.+)\.sql$/);
                if (!match) continue;

                let change = changesMap.get(match[1]);
                if (!change) {
                    change = {path: match[1]};
                    changes.push(change);
                    changesMap.set(match[1], change);
                }
                change.mark = patch.isDeleted() ? '-' : '+';
            }
        }

        const head = await repo.getHeadCommit();

        if (head && commitSha) {
            let commit;
            let notFound;

            try {
                commit = await repo.getCommit(commitSha);
                notFound = false;
            } catch (err) {
                if (err.errorFunction == 'Commit.lookup')
                    notFound = true;
                else
                    throw err;
            }

            if (notFound) {
                console.warn(`Database commit not found, trying git fetch`.yellow);
                await repo.fetchAll();
                commit = await repo.getCommit(commitSha);
            }

            const commitTree = await commit.getTree();

            const headTree = await head.getTree();
            const diff = await headTree.diff(commitTree);
            await pushChanges(diff);
        }

        await pushChanges(await repoExt.getUnstaged(repo));
        await pushChanges(await repoExt.getStaged(repo));

        return changes.sort((a, b) => {
            if (b.mark != a.mark)
                return b.mark == '-' ? 1 : -1;
            return a.path.localeCompare(b.path);
        });
    }
}

const typeMap = {
@@ -1,4 +1,3 @@

const Myt = require('./myt');
const Command = require('./lib/command');
const Push = require('./myt-push');
@@ -6,6 +5,7 @@ const docker = require('./lib/docker');
const fs = require('fs-extra');
const path = require('path');
const Server = require('./lib/server');
const connExt = require('./lib/conn');

/**
 * Builds the database image and runs a container. It only rebuilds the
@@ -22,7 +22,7 @@ class Run extends Command {
        }
    };

    static localOpts = {
    static opts = {
        alias: {
            ci: 'c',
            random: 'r'
@@ -144,7 +144,7 @@ class Run extends Command {

            const triggersDir = await fs.readdir(triggersPath);
            for (const triggerFile of triggersDir)
                await myt.queryFromFile(conn, `${triggersPath}/${triggerFile}`);
                await connExt.queryFromFile(conn, `${triggersPath}/${triggerFile}`);
        }

        return server;
@@ -1,4 +1,3 @@

const Myt = require('./myt');
const Command = require('./lib/command');
const Container = require('./lib/docker').Container;
@@ -1,4 +1,3 @@

const Myt = require('./myt');
const Command = require('./lib/command');
const fs = require('fs-extra');
@@ -15,7 +14,7 @@ class Version extends Command {
        operand: 'name'
    };

    static localOpts = {
    static opts = {
        alias: {
            name: 'n'
        },
myt.js
@@ -10,12 +10,9 @@ const path = require('path');
const mysql = require('mysql2/promise');
const nodegit = require('nodegit');
const camelToSnake = require('./lib/util').camelToSnake;
const docker = require('./lib/docker');
const Command = require('./lib/command');

class Myt {
    get usage() {
        return {
    static usage = {
        description: 'Utility for database versioning',
        params: {
            remote: 'Name of remote to use',
@@ -26,10 +23,8 @@ class Myt {
            help: 'Display this help message'
        }
    };
    }

    get localOpts() {
        return {
    static opts = {
        alias: {
            remote: 'r',
            workspace: 'w',
@@ -42,20 +37,19 @@ class Myt {
            'debug',
            'version',
            'help'
        ],
        default: {
            workspace: process.cwd()
        }
        ]
    };
    }

    async run(CommandClass) {
    async run(Command) {
        console.log(
            'Myt'.green,
            `v${packageJson.version}`.magenta
        );

        const baseOpts = this.localOpts;
        let baseOpts = this.constructor.opts;
        baseOpts.default = Object.assign(baseOpts.default || {}, {
            workspace: process.cwd()
        });
        const opts = this.getopts(baseOpts);

        if (opts.debug) {
@@ -68,7 +62,7 @@ class Myt {

        try {
            const commandName = opts._[0];
            if (!CommandClass && commandName) {
            if (!Command && commandName) {
                if (!/^[a-z]+$/.test(commandName))
                    throw new Error (`Invalid command name '${commandName}'`);

@@ -76,20 +70,20 @@ class Myt {

                if (!await fs.pathExists(commandFile))
                    throw new Error (`Unknown command '${commandName}'`);
                CommandClass = require(commandFile);
                Command = require(commandFile);
            }

            if (!CommandClass) {
                this.showHelp(baseOpts, this.usage);
            if (!Command) {
                this.showHelp(baseOpts, this.constructor.usage);
                process.exit(0);
            }

            const allOpts = Object.assign({}, baseOpts);

            if (CommandClass.localOpts)
                for (const key in CommandClass.localOpts) {
            if (Command.opts)
                for (const key in Command.opts) {
                    const baseValue = baseOpts[key];
                    const cmdValue = CommandClass.localOpts[key];
                    const cmdValue = Command.opts[key];
                    if (Array.isArray(baseValue))
                        allOpts[key] = baseValue.concat(cmdValue);
                    else if (typeof baseValue == 'object')
@@ -103,7 +97,7 @@ class Myt {
                console.log('Command options:'.magenta, commandOpts);
            Object.assign(opts, commandOpts);

            const operandToOpt = CommandClass.usage.operand;
            const operandToOpt = Command.usage.operand;
            if (opts._.length >= 2 && operandToOpt)
                opts[operandToOpt] = opts._[1];

@@ -111,7 +105,7 @@ class Myt {
                console.log('Final options:'.magenta, opts);

            if (opts.help) {
                this.showHelp(CommandClass.localOpts, CommandClass.usage, commandName);
                this.showHelp(Command.opts, Command.usage, commandName);
                process.exit(0);
            }

@@ -150,7 +144,7 @@ class Myt {
            parameter('Remote:', opts.remote || 'local');

            await this.load(opts);
            await this.runCommand(CommandClass, opts);
            await this.runCommand(Command, opts);
            await this.unload();
        } catch (err) {
            if (err.name == 'Error' && !opts.debug) {
@@ -169,9 +163,8 @@ class Myt {
        process.exit();
    }

    async runCommand(CommandClass, opts) {
        const command = new CommandClass();
        command.opts = opts;
    async runCommand(Command, opts) {
        const command = new Command(this, opts);
        return await command.run(this, opts);
    }

@@ -315,70 +308,6 @@ class Myt {
        };
    }

    async changedRoutines(commitSha) {
        const repo = await this.openRepo();
        const changes = [];
        const changesMap = new Map();

        async function pushChanges(diff) {
            if (!diff) return;
            const patches = await diff.patches();

            for (const patch of patches) {
                const path = patch.newFile().path();
                const match = path.match(/^routines\/(.+)\.sql$/);
                if (!match) continue;

                let change = changesMap.get(match[1]);
                if (!change) {
                    change = {path: match[1]};
                    changes.push(change);
                    changesMap.set(match[1], change);
                }
                change.mark = patch.isDeleted() ? '-' : '+';
            }
        }

        const head = await repo.getHeadCommit();

        if (head && commitSha) {
            let commit;
            let notFound;

            try {
                commit = await repo.getCommit(commitSha);
                notFound = false;
            } catch (err) {
                if (err.errorFunction == 'Commit.lookup')
                    notFound = true;
                else
                    throw err;
            }

            if (notFound) {
                console.warn(`Database commit not found, trying git fetch`.yellow);
                await repo.fetchAll();
                commit = await repo.getCommit(commitSha);
            }

            const commitTree = await commit.getTree();

            const headTree = await head.getTree();
            const diff = await headTree.diff(commitTree);
            await pushChanges(diff);
        }

        await pushChanges(await this.getUnstaged(repo));
        await pushChanges(await this.getStaged(repo));

        return changes.sort((a, b) => {
            if (b.mark != a.mark)
                return b.mark == '-' ? 1 : -1;
            return a.path.localeCompare(b.path);

        });
    }

    async openRepo() {
        const {opts} = this;

@@ -388,94 +317,6 @@ class Myt {
        return await nodegit.Repository.open(opts.workspace);
    }

    async getStaged(repo) {
        const head = await repo.getHeadCommit();

        try {
            const emptyTree = '4b825dc642cb6eb9a060e54bf8d69288fbee4904';
            const headTree = await (head
                ? head.getTree()
                : nodegit.Tree.lookup(repo, emptyTree)
            );
            return await nodegit.Diff.treeToIndex(repo, headTree, null);
        } catch (err) {
            console.warn('Cannot fetch staged changes:', err.message);
        }
    }

    async getUnstaged(repo) {
        return await nodegit.Diff.indexToWorkdir(repo, null, {
            flags: nodegit.Diff.OPTION.SHOW_UNTRACKED_CONTENT
                | nodegit.Diff.OPTION.RECURSE_UNTRACKED_DIRS
        });
    }

    async initDump(dumpFile) {
        const dumpDir = this.opts.dumpDir;
        if (!await fs.pathExists(dumpDir))
            await fs.mkdir(dumpDir);

        const dumpPath = path.join(dumpDir, dumpFile);

        // FIXME: If it's called after docker.build() statement it creates an
        // "invalid" WriteStream
        const dumpStream = await fs.createWriteStream(dumpPath);

        await docker.build(__dirname, {
            tag: 'myt/client',
            file: path.join(__dirname, 'server', 'Dockerfile.client')
        }, this.opts.debug);

        return dumpStream;
    }

    async dumpFixtures(dumpStream, tables, replace) {
        const fixturesArgs = [
            '--no-create-info',
            '--skip-triggers',
            '--skip-extended-insert',
            '--skip-disable-keys',
            '--skip-add-locks',
            '--skip-set-charset',
            '--skip-comments',
            '--skip-tz-utc'
        ];

        if (replace)
            fixturesArgs.push('--replace');

        for (const schema in tables) {
            const escapedSchema = '`'+ schema.replace('`', '``') +'`';
            await dumpStream.write(
                `USE ${escapedSchema};\n`,
                'utf8'
            );

            const args = fixturesArgs.concat([schema], tables[schema]);
            await this.runDump('mysqldump', args, dumpStream);
        }
    }

    async runDump(command, args, dumpStream) {
        const iniPath = path.join(this.opts.subdir || '', 'remotes', this.opts.iniFile);
        const myArgs = [
            `--defaults-file=${iniPath}`
        ];
        const execOptions = {
            stdio: [
                process.stdin,
                dumpStream,
                process.stderr
            ]
        };
        const commandArgs = [command].concat(myArgs, args);
        await docker.run('myt/client', commandArgs, {
            addHost: 'host.docker.internal:host-gateway',
            volume: `${this.opts.mytDir}:/workspace`,
            rm: true
        }, execOptions);
    }

    showHelp(opts, usage, command) {
        const prefix = `${'Usage:'.gray} [npx] myt`;

@@ -505,139 +346,6 @@ class Myt {
            }
        }
    }

    /**
     * Executes an SQL script.
     *
     * @param {Connection} conn MySQL connection object
     * @returns {Array<Result>} The resultset
     */
    async queryFromFile(conn, file) {
        const sql = await fs.readFile(file, 'utf8');
        return await this.multiQuery(conn, sql);
    }

    /**
     * Executes a multi-query string.
     *
     * @param {Connection} conn MySQL connection object
     * @param {String} sql SQL multi-query string
     * @returns {Array<Result>} The resultset
     */
    async multiQuery(conn, sql) {
        let results = [];
        const stmts = this.querySplit(sql);

        for (const stmt of stmts)
            results = results.concat(await conn.query(stmt));

        return results;
    }

    /**
     * Splits an SQL muti-query into a single-query array, it does an small
     * parse to correctly handle the DELIMITER statement.
     *
     * @param {Array<String>} stmts The splitted SQL statements
     */
    querySplit(sql) {
        const stmts = [];
        let i,
            char,
            token,
            escaped,
            stmtStart;

        let delimiter = ';';
        const delimiterRe = /\s*delimiter\s+(\S+)[^\S\r\n]*(?:\r?\n|\r|$)/yi;

        function begins(str) {
            let j;
            for (j = 0; j < str.length; j++)
                if (sql[i + j] != str[j])
                    return false;
            i += j;
            return true;
        }

        for (i = 0; i < sql.length;) {
            stmtStart = i;

            delimiterRe.lastIndex = i;
            const match = sql.match(delimiterRe);
            if (match) {
                delimiter = match[1];
                i += match[0].length;
                continue;
            }

            let delimiterFound = false;
            while (i < sql.length) {
                char = sql[i];

                if (token) {
                    if (!escaped && begins(token.end))
                        token = null;
                    else {
                        escaped = !escaped && token.escape(char);
                        i++;
                    }
                } else {
                    delimiterFound = begins(delimiter);
                    if (delimiterFound) break;

                    const tok = tokenIndex.get(char);
                    if (tok && begins(tok.start))
                        token = tok;
                    else
                        i++;
                }
            }

            let len = i - stmtStart;
            if (delimiterFound) len -= delimiter.length;
            const stmt = sql.substr(stmtStart, len);

            if (!/^\s*$/.test(stmt))
                stmts.push(stmt);
        }

        return stmts;
    }
}

const tokens = {
    string: {
        start: '\'',
        end: '\'',
        escape: char => char == '\'' || char == '\\'
    },
    quotedString: {
        start: '"',
        end: '"',
        escape: char => char == '"' || char == '\\'
    },
    id: {
        start: '`',
        end: '`',
        escape: char => char == '`'
    },
    multiComment: {
        start: '/*',
        end: '*/',
        escape: () => false
    },
    singleComment: {
        start: '-- ',
        end: '\n',
        escape: () => false
    }
};

const tokenIndex = new Map();
for (const tokenId in tokens) {
    const token = tokens[tokenId];
    tokenIndex.set(token.start[0], token);
}

module.exports = Myt;
@@ -1,12 +1,12 @@
{
  "name": "@verdnatura/myt",
  "version": "1.5.6",
  "version": "1.5.7",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "": {
      "name": "@verdnatura/myt",
      "version": "1.5.6",
      "version": "1.5.7",
      "license": "GPL-3.0",
      "dependencies": {
        "@sqltools/formatter": "^1.2.3",
@@ -21,9 +21,7 @@
        "sha.js": "^2.4.11"
      },
      "bin": {
        "myt": "cli.js",
        "myv": "cli.js",
        "myvc": "cli.js"
        "myt": "cli.js"
      }
    },
    "node_modules/@sindresorhus/is": {
@@ -1,6 +1,6 @@
{
  "name": "@verdnatura/myt",
  "version": "1.5.6",
  "version": "1.5.7",
  "author": "Verdnatura Levante SL",
  "description": "MySQL version control",
  "license": "GPL-3.0",
@@ -1,4 +1,4 @@
#!/bin/bash

# FIXME: It can corrupt data
mysqldump $@ | sed 's/ AUTO_INCREMENT=[0-9]* //g'
mysqldump $@ | sed 's/ AUTO_INCREMENT=[0-9]*//g'
@@ -2,4 +2,5 @@
node_modules
remotes/*.ini
!remotes/local.ini
!remotes/docker.ini
dump/.changes
@@ -8,6 +8,6 @@
    "type": "git"
  },
  "dependencies": {
    "@verdnatura/myt": "^1.5.6"
    "@verdnatura/myt": "^1.5.7"
  }
}
@@ -0,0 +1,5 @@
[client]
host = host.docker.internal
port = 3306
user = root
password = root