Alpha release

This commit is contained in:
Juan Ferrer 2020-12-02 08:35:26 +01:00
parent 61de39aab9
commit 9d71ee7d23
37 changed files with 2345 additions and 942 deletions

View File

@ -1,4 +1,2 @@
node_modules node_modules
package.json .git
package-lock.json
README.md

View File

@ -1,26 +1,11 @@
FROM debian:bullseye-slim FROM debian:bullseye-slim
ENV TZ Europe/Madrid
ARG DEBIAN_FRONTEND=noninteractive ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y --no-install-recommends \ && apt-get install -y --no-install-recommends \
mariadb-client \ mariadb-client \
libmariadb3 \ libmariadb3 \
git \
jq \
iputils-ping \
dnsutils \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
COPY \ COPY myvc-dump.sh /usr/local/bin/
myvc-dump.sh \
myvc-push.sh \
structure.sql \
/usr/local/bin/
COPY \
workspace/remotes/local.ini \
/usr/local/bin/db.ini
WORKDIR /workspace WORKDIR /workspace

View File

@ -8,13 +8,11 @@ development, so it may not be fully functional.
Any help is welcomed! Feel free to contribute. Any help is welcomed! Feel free to contribute.
## Prerequisites ## Requirements
Required applications. * Node.js <= 12.0
* Node.js = 12.17.0 LTS
* Git * Git
* Docker * Docker (Only to setup a local server)
## Installation ## Installation
@ -41,19 +39,19 @@ $ myvc [-w|--workspace] [-e|--env] [-h|--help] command
``` ```
The default workspace directory is the current working directory and unless The default workspace directory is the current working directory and unless
otherwise indicated, the default environment is *production*. otherwise indicated, the default environment is *local*.
Commands for database versioning: Commands for database versioning:
* **init**: Initialize an empty workspace. * **init**: Initialize an empty workspace.
* **pull**: Export database routines into workspace. * **pull**: Export database routines into workspace.
* **push**: Apply changes into database, uses *test* environment by default. * **push**: Apply changes into database.
Commands for local server management: Commands for local server management:
* **dump**: Export database structure and fixtures. * **dump**: Export database structure and fixtures from *production*.
* **run**: Builds and starts local database server container. * **run**: Build and starts local database server container.
* **start**: Starts local database server container. * **start**: Start local database server container.
Each command can have its own specific commandline options. Each command can have its own specific commandline options.
@ -65,7 +63,7 @@ First of all you have to initalize your workspace.
$ myvc init $ myvc init
``` ```
Now yoy can configure MyVC using *myvc.config.json* file, located at the root of Now you can configure MyVC using *myvc.config.yml* file, located at the root of
your workspace. This file should include the project codename and schemas/tables your workspace. This file should include the project codename and schemas/tables
wich are exported when you use *pull* or *dump* commands. wich are exported when you use *pull* or *dump* commands.

View File

@ -1,39 +0,0 @@
const path = require('path');
const execFile = require('child_process').execFile;
const spawn = require('child_process').spawn;
module.exports = async function(command, workspace, ...args) {
const buildArgs = [
'build',
'-t', 'myvc/client',
'-f', path.join(__dirname, 'Dockerfile.client'),
__dirname
];
await new Promise((resolve, reject) => {
execFile('docker', buildArgs, (err, stdout, stderr) => {
if (err)
return reject(err);
resolve({stdout, stderr});
});
})
let runArgs = [
'run',
'-v', `${workspace}:/workspace`,
'myvc/client',
command
];
runArgs = runArgs.concat(args);
await new Promise((resolve, reject) => {
const child = spawn('docker', runArgs, {
stdio: [
process.stdin,
process.stdout,
process.stderr
]
});
child.on('exit', code => resolve(code));
})
};

338
docker.js
View File

@ -1,253 +1,125 @@
const spawn = require('child_process').spawn;
const execFile = require('child_process').execFile; const execFile = require('child_process').execFile;
const log = require('fancy-log');
const path = require('path');
module.exports = class Docker { const docker = {
constructor(name, context) { async run(image, commandArgs, options, execOptions) {
Object.assign(this, { const args = commandArgs
id: name, ? [image].concat(commandArgs)
name, : image;
isRandom: name == null, const execMode = options.detach ? 'exec' : 'spawn';
dbConf: {
host: 'localhost',
port: '3306',
username: 'root',
password: 'root'
},
imageTag: name || 'myvc/dump',
context
});
}
/** const child = await this.exec('run',
* Builds the database image and runs a container. It only rebuilds the args,
* image when fixtures have been modified or when the day on which the options,
* image was built is different to today. Some workarounds have been used execMode,
* to avoid a bug with OverlayFS driver on MacOS. execOptions
* );
* @param {Boolean} ci continuous integration environment argument return options.detach
*/ ? new Container(child.stdout.trim())
async run(ci) { : child;
let dockerfilePath = path.join(__dirname, 'Dockerfile'); },
await this.execFile('docker', [ async build(url, options, execOptions) {
'build', return await this.exec('build',
'-t', 'myvc/server', url,
'-f', `${dockerfilePath}.server`, options,
__dirname 'spawn',
]); execOptions
);
},
let d = new Date(); async start(id, options) {
let pad = v => v < 10 ? '0' + v : v; const ct = new Container(id);
let stamp = `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())}`; await ct.start(options);
return ct;
},
await this.execFile('docker', [ async stop(id, options) {
'build', const ct = new Container(id);
'-t', this.imageTag, return await ct.stop(options);
'-f', `${dockerfilePath}.dump`, },
'--build-arg', `STAMP=${stamp}`,
this.context
]);
let dockerArgs; async rm(id, options) {
const ct = new Container(id);
return await ct.rm(options);
},
if (this.isRandom) async inspect(id, options) {
dockerArgs = ['-p', '3306']; const ct = new Container(id);
else { return await ct.inspect(options);
try { },
await this.rm();
} catch (e) {}
dockerArgs = ['--name', this.name, '-p', `3306:${this.dbConf.port}`];
}
let runChown = process.platform != 'linux'; async exec(command, args, options, execMode, execOptions) {
const container = await this.execFile('docker', [ const execArgs = [command];
'run',
'--env', `RUN_CHOWN=${runChown}`,
'-d',
...dockerArgs,
this.imageTag
]);
this.id = container.stdout.trim();
try { if (options)
if (this.isRandom) { for (const option in options) {
let netSettings = await this.execJson('docker', [ execArgs.push(`--${camelToSnake(option)}`);
'inspect', '-f', '{{json .NetworkSettings}}', this.id if (typeof options[option] !== 'boolean')
]); execArgs.push(options[option]);
if (ci)
this.dbConf.host = netSettings.Gateway;
this.dbConf.port = netSettings.Ports['3306/tcp'][0]['HostPort'];
} }
await this.wait(); if (Array.isArray(args))
} catch (err) { Array.prototype.push.apply(execArgs, args);
if (this.isRandom) else if (args)
await this.rm(); execArgs.push(args);
throw err;
}
}
/** return await new Promise((resolve, reject) => {
* Does the minium effort to start the database container, if it doesn't if (execMode == 'spawn') {
* exists calls the 'docker' task, if it is started does nothing. Keep in if (execOptions === true)
* mind that when you do not rebuild the docker you may be using an outdated execOptions = {
* version of it. stdio: [
*/ process.stdin,
async start() { process.stdout,
let state; process.stderr
try { ]
state = await this.execJson('docker', [ };
'inspect', '-f', '{{json .State}}', this.id
]);
} catch (err) {
return await this.run();
}
switch (state.Status) { const child = spawn('docker', execArgs, execOptions || undefined);
case 'running': child.on('exit', code => {
return; if (code !== 0) {
case 'exited': const args = JSON.stringify(execArgs);
await this.execFile('docker', ['start', this.id]); reject(new Error(`'docker' ${args}: Exit code: ${code}`));
await this.wait(); } else
return; resolve(code);
default: });
throw new Error(`Unknown docker status: ${state.Status}`); } else {
} execFile('docker', execArgs, (err, stdout, stderr) => {
} if (err)
reject(err);
waitForHealthy() {
return new Promise((resolve, reject) => {
let interval = 100;
let elapsedTime = 0;
let maxInterval = 4 * 60 * 1000;
log('Waiting for container to be ready...');
async function checker() {
elapsedTime += interval;
let status;
try {
let status = await this.execJson('docker', [
'inspect', '-f', '{{.State.Health.Status}}', this.id
]);
status = status.trimEnd();
} catch (err) {
return reject(new Error(err.message));
}
if (status == 'unhealthy')
return reject(new Error('Docker exited, please see the docker logs for more info'));
if (status == 'healthy') {
log('Container ready.');
return resolve();
}
if (elapsedTime >= maxInterval)
reject(new Error(`Container not initialized whithin ${elapsedTime / 1000} secs`));
else
setTimeout(bindedChecker, interval);
}
let bindedChecker = checker.bind(this);
bindedChecker();
});
}
wait() {
return new Promise((resolve, reject) => {
const mysql = require('mysql2');
let interval = 100;
let elapsedTime = 0;
let maxInterval = 4 * 60 * 1000;
let myConf = {
user: this.dbConf.username,
password: this.dbConf.password,
host: this.dbConf.host,
port: this.dbConf.port
};
log('Waiting for MySQL init process...');
async function checker() {
elapsedTime += interval;
let state;
try {
state = await this.execJson('docker', [
'inspect', '-f', '{{json .State}}', this.id
]);
} catch (err) {
return reject(new Error(err.message));
}
if (state.Status === 'exited')
return reject(new Error('Docker exited, please see the docker logs for more info'));
let conn = mysql.createConnection(myConf);
conn.on('error', () => {});
conn.connect(err => {
conn.destroy();
if (!err) {
log('MySQL process ready.');
return resolve();
}
if (elapsedTime >= maxInterval)
reject(new Error(`MySQL not initialized whithin ${elapsedTime / 1000} secs`));
else else
setTimeout(bindedChecker, interval); resolve({stdout, stderr});
}); });
} }
let bindedChecker = checker.bind(this);
bindedChecker();
}); });
} }
async rm() {
try {
await this.execFile('docker', ['stop', this.id]);
await this.execFile('docker', ['rm', '-v', this.id]);
} catch (e) {}
}
/**
* Promisified version of execFile().
*
* @param {String} command The exec command
* @param {Array} args The command arguments
* @return {Promise} The promise
*/
execFile(command, args) {
return new Promise((resolve, reject) => {
execFile(command, args, (err, stdout, stderr) => {
if (err)
reject(err);
else {
resolve({
stdout: stdout,
stderr: stderr
});
}
});
});
}
/**
* Executes a command whose return is json.
*
* @param {String} command The exec command
* @param {Array} args The command arguments
* @return {Object} The parsed JSON
*/
async execJson(command, args) {
const result = await this.execFile(command, args);
return JSON.parse(result.stdout);
}
}; };
class Container {
construct(id) {
this.id = id;
}
async start(options) {
await docker.exec('start', this.id, options);
}
async stop(options) {
await docker.exec('stop', this.id, options);
}
async rm(options) {
await docker.exec('rm', this.id, options);
}
async inspect(options) {
const child = await docker.exec('inspect', this.id, options);
return JSON.parse(child.stdout);
}
}
function camelToSnake(str) {
return str.replace(/[A-Z]/g, match => `-${match.toLowerCase()}`);
}
module.exports = docker;

305
index.js
View File

@ -1,76 +1,109 @@
require('require-yaml');
require('colors'); require('colors');
const getopts = require('getopts'); const getopts = require('getopts');
const package = require('./package.json'); const packageJson = require('./package.json');
const fs = require('fs-extra'); const fs = require('fs-extra');
const ini = require('ini'); const ini = require('ini');
const path = require('path'); const path = require('path');
const dockerRun = require('./docker-run'); const mysql = require('mysql2/promise');
const nodegit = require('nodegit');
console.log('MyVC (MySQL Version Control)'.green, `v${package.version}`.magenta);
const argv = process.argv.slice(2);
const cliOpts = getopts(argv, {
alias: {
env: 'e',
workspace: 'w',
help: 'h',
version: 'v'
},
default: {
workspace: process.cwd(),
env: 'production'
}
})
if (cliOpts.version)
process.exit(0);
const command = cliOpts._[0];
if (!command) {
console.log('Usage:'.gray, '[npx] myvc [-w|--workspace] [-e|--env] [-h|--help] command'.blue);
process.exit(0);
}
const commandArgs = {
push: {
alias: {
env: 'e',
force: 'f',
user: 'u',
applyUncommited: 'a'
},
default: {
env: 'test'
}
}
};
const commandOpts = getopts(argv, commandArgs[command]);
Object.assign(cliOpts, commandOpts);
const opts = {};
for (let opt in cliOpts) {
if (opt.length > 1 || opt == '_')
opts[opt] = cliOpts[opt];
}
function parameter(parameter, value) {
console.log(parameter.gray, value.blue);
}
parameter('Environment:', opts.env);
parameter('Workspace:', opts.workspace);
parameter('Command:', command);
class MyVC { class MyVC {
async run(command) {
console.log(
'MyVC (MySQL Version Control)'.green,
`v${packageJson.version}`.magenta
);
const opts = {};
const argv = process.argv.slice(2);
const cliOpts = getopts(argv, {
alias: {
env: 'e',
workspace: 'w',
debug: 'd',
version: 'v',
help: 'h'
},
default: {
workspace: process.cwd()
}
})
if (cliOpts.version)
process.exit(0);
try {
if (!command) {
const commandName = cliOpts._[0];
if (!commandName) {
console.log(
'Usage:'.gray,
'[npx] myvc'
+ '[-w|--workspace]'
+ '[-e|--env]'
+ '[-d|--debug]'
+ '[-h|--help]'
+ '[-v|--version]'
+ 'command'.blue
);
process.exit(0);
}
const commands = [
'init',
'pull',
'push',
'dump',
'start',
'run'
];
if (commands.indexOf(commandName) == -1)
throw new Error (`Unknown command '${commandName}'`);
const Klass = require(`./myvc-${commandName}`);
command = new Klass();
}
const commandOpts = getopts(argv, command.myOpts);
Object.assign(cliOpts, commandOpts);
for (const opt in cliOpts) {
if (opt.length > 1 || opt == '_')
opts[opt] = cliOpts[opt];
}
parameter('Workspace:', opts.workspace);
parameter('Environment:', opts.env);
await this.load(opts);
command.opts = opts;
await command.run(this, opts);
await this.unload();
} catch (err) {
if (err.name == 'Error' && !opts.debug)
console.error('Error:'.gray, err.message.red);
else
throw err;
}
function parameter(parameter, value) {
console.log(parameter.gray, (value || 'null').blue);
}
process.exit();
}
async load(opts) { async load(opts) {
// Configuration file // Configuration file
const configFile = 'myvc.config.json'; const config = require(`${__dirname}/myvc.default.yml`);
const configFile = 'myvc.config.yml';
const configPath = path.join(opts.workspace, configFile); const configPath = path.join(opts.workspace, configFile);
if (!await fs.pathExists(configPath)) if (await fs.pathExists(configPath))
throw new Error(`Config file not found: ${configFile}`); Object.assign(config, require(configPath));
const config = require(configPath);
Object.assign(opts, config); Object.assign(opts, config);
opts.configFile = configFile; opts.configFile = configFile;
@ -90,10 +123,11 @@ class MyVC {
const iniConfig = ini.parse(await fs.readFile(iniPath, 'utf8')).client; const iniConfig = ini.parse(await fs.readFile(iniPath, 'utf8')).client;
const dbConfig = { const dbConfig = {
host: !opts.env ? 'localhost' : iniConfig.host, host: iniConfig.host,
port: iniConfig.port, port: iniConfig.port,
user: iniConfig.user, user: iniConfig.user,
password: iniConfig.password, password: iniConfig.password,
database: opts.versionSchema,
authPlugins: { authPlugins: {
mysql_clear_password() { mysql_clear_password() {
return () => iniConfig.password + '\0'; return () => iniConfig.password + '\0';
@ -107,81 +141,112 @@ class MyVC {
rejectUnauthorized: iniConfig.ssl_verify_server_cert != undefined rejectUnauthorized: iniConfig.ssl_verify_server_cert != undefined
} }
} }
if (!opts.env)
dbConfig.socketPath = '/var/run/mysqld/mysqld.sock';
Object.assign(opts, { Object.assign(opts, {
iniFile, iniFile,
dbConfig dbConfig
}); });
this.opts = opts;
} }
async init(opts) { async dbConnect() {
const templateDir = `${__dirname}/workspace`; if (!this.conn)
const templates = await fs.readdir(templateDir); this.conn = await this.createConnection();
for (let template of templates){ return this.conn;
const dst = `${opts.workspace}/${template}`; }
if (!await fs.pathExists(dst))
await fs.copy(`${templateDir}/${template}`, dst); async createConnection() {
return await mysql.createConnection(this.opts.dbConfig);
}
async unload() {
if (this.conn)
await this.conn.end();
}
async fetchDbVersion() {
const {opts} = this;
const [[res]] = await this.conn.query(
`SELECT COUNT(*) > 0 tableExists
FROM information_schema.tables
WHERE TABLE_SCHEMA = ?
AND TABLE_NAME = 'version'`,
[opts.versionSchema]
);
if (!res.tableExists) {
const structure = await fs.readFile(`${__dirname}/structure.sql`, 'utf8');
await this.conn.query(structure);
return null;
} }
const [[version]] = await this.conn.query(
`SELECT number, gitCommit
FROM version WHERE code = ?`,
[opts.code]
);
return version;
} }
async pull(opts) { async changedRoutines(commit) {
const pull = require('./myvc-pull'); const repo = await nodegit.Repository.open(this.opts.workspace);
await pull(
opts.workspace, const from = await repo.getCommit(commit);
opts.schemas, const fromTree = await from.getTree();
opts.dbConfig
const to = await repo.getHeadCommit();
const toTree = await to.getTree();
const diff = await toTree.diff(fromTree);
const patches = await diff.patches();
const changes = [];
for (const patch of patches) {
const path = patch.newFile().path();
const match = path.match(/^routines\/(.+)\.sql$/);
if (!match) continue;
changes.push({
mark: patch.isDeleted() ? '-' : '+',
path: match[1]
});
}
return changes.sort(
(a, b) => b.mark == '-' && b.mark != a.mark ? 1 : -1
); );
} }
async push(opts) { async cachedChanges() {
let args = []; const changes = [];
if (opts.force) args.push('-f'); const dumpDir = `${this.opts.workspace}/dump`;
if (opts.user) args.push('-u'); const dumpChanges = `${dumpDir}/.changes`;
if (opts.applyUncommited) args.push('-a');
if (opts.env) args = args.concat(['-e', opts.env]);
await dockerRun('myvc-push.sh', if (!await fs.pathExists(dumpChanges))
opts.workspace, return null;
...args
);
}
async dump (opts) { const readline = require('readline');
await dockerRun('myvc-dump.sh', const rl = readline.createInterface({
opts.workspace, input: fs.createReadStream(dumpChanges),
opts.configFile, //output: process.stdout,
opts.iniFile console: false
); });
}
async run(opts) { for await (const line of rl) {
const Docker = require('./docker'); changes.push({
const container = new Docker(opts.code, opts.workspace); mark: line.charAt(0),
await container.run(); path: line.substr(1)
} });
}
async start(opts) { return changes;
const Docker = require('./docker');
const container = new Docker(opts.code, opts.workspace);
await container.start();
} }
} }
(async function() {
try {
const myvc = new MyVC();
if (command != 'load' && myvc[command]) {
await myvc.load(opts);
await myvc[command](opts);
} else
throw new Error (`Unknown command '${command}'`);
} catch (err) {
if (err.name == 'Error')
console.error('Error:'.gray, err.message.red);
else
throw err;
}
})();
module.exports = MyVC; module.exports = MyVC;
if (require.main === module)
new MyVC().run();

93
myvc-dump.js Normal file
View File

@ -0,0 +1,93 @@
const MyVC = require('./index');
const fs = require('fs-extra');
const path = require('path');
const docker = require('./docker');
class Dump {
get myOpts() {
return {
alias: {
env: 'e'
},
default: {
env: 'production'
}
};
}
async run(myvc, opts) {
const conn = await myvc.dbConnect();
const dumpDir = `${opts.workspace}/dump`;
if (!await fs.pathExists(dumpDir))
await fs.mkdir(dumpDir);
const dumpFile = `${dumpDir}/.dump.sql`;
const dumpStream = await fs.createWriteStream(dumpFile);
const execOptions = {
stdio: [
process.stdin,
dumpStream,
process.stderr
]
};
await docker.build(__dirname, {
tag: 'myvc/client',
file: path.join(__dirname, 'Dockerfile.client')
}, !!this.opts.debug);
let dumpArgs = [
`--defaults-file=${opts.iniFile}`,
'--default-character-set=utf8',
'--no-data',
'--comments',
'--triggers',
'--routines',
'--events',
'--databases'
];
dumpArgs = dumpArgs.concat(opts.schemas);
await this.dockerRun('myvc-dump.sh', dumpArgs, execOptions);
const fixturesArgs = [
`--defaults-file=${opts.iniFile}`,
'--no-create-info',
'--skip-triggers',
'--insert-ignore'
];
for (const schema in opts.fixtures) {
await dumpStream.write(
`USE ${conn.escapeId(schema, true)};\n`,
'utf8'
);
const args = fixturesArgs.concat([schema], opts.fixtures[schema]);
await this.dockerRun('mysqldump', args, execOptions);
}
await dumpStream.end();
const version = await myvc.fetchDbVersion();
if (version){
await fs.writeFile(
`${dumpDir}/.dump.json`,
JSON.stringify(version)
);
}
}
async dockerRun(command, args, execOptions) {
const commandArgs = [command].concat(args);
await docker.run('myvc/client', commandArgs, {
volume: `${this.opts.workspace}:/workspace`
}, execOptions);
}
}
module.exports = Dump;
if (require.main === module)
new MyVC().run(Dump);

View File

@ -1,34 +1,3 @@
#!/bin/bash #!/bin/bash
set -e
CONFIG_FILE=$1 mysqldump $@ | sed 's/ AUTO_INCREMENT=[0-9]* //g'
INI_FILE=$2
DUMP_DIR="dump"
DUMP_FILE="$DUMP_DIR/.dump.sql"
echo "SELECT 1;" | mysql --defaults-file="$INI_FILE" >> /dev/null
SCHEMAS=( $(jq -r ".schemas[]" "$CONFIG_FILE") )
mkdir -p "$DUMP_DIR"
mysqldump \
--defaults-file="$INI_FILE" \
--default-character-set=utf8 \
--no-data \
--comments \
--triggers --routines --events \
--databases \
${SCHEMAS[@]} \
| sed 's/ AUTO_INCREMENT=[0-9]* //g' \
> "$DUMP_FILE"
for SCHEMA in $(jq -r ".fixtures | keys[]" "$CONFIG_FILE"); do
TABLES=( $(jq -r ".fixtures.$SCHEMA[]" "$CONFIG_FILE") )
echo "USE \`$SCHEMA\`;" >> "$DUMP_FILE"
mysqldump \
--defaults-file="$INI_FILE" \
--no-create-info \
--skip-triggers \
$SCHEMA ${TABLES[@]} >> "$DUMP_FILE"
done

20
myvc-init.js Executable file
View File

@ -0,0 +1,20 @@
const MyVC = require('./index');
const fs = require('fs-extra');
class Init {
async run(myvc, opts) {
const templateDir = `${__dirname}/template`;
const templates = await fs.readdir(templateDir);
for (let template of templates) {
const dst = `${opts.workspace}/${template}`;
if (!await fs.pathExists(dst))
await fs.copy(`${templateDir}/${template}`, dst);
}
}
}
module.exports = Init;
if (require.main === module)
new MyVC().run(Init);

View File

@ -1,39 +1,64 @@
const MyVC = require('./index');
const fs = require('fs-extra'); const fs = require('fs-extra');
const mysql = require('mysql2/promise');
const ejs = require('ejs'); const ejs = require('ejs');
class Pull {
async run(myvc, opts) {
const conn = await myvc.dbConnect();
for (const exporter of exporters)
await exporter.init();
const exportDir = `${opts.workspace}/routines`;
if (await fs.pathExists(exportDir))
await fs.remove(exportDir, {recursive: true});
await fs.mkdir(exportDir);
for (const schema of opts.schemas) {
let schemaDir = `${exportDir}/${schema}`;
if (!await fs.pathExists(schemaDir))
await fs.mkdir(schemaDir);
for (const exporter of exporters)
await exporter.export(conn, exportDir, schema);
}
}
}
class Exporter { class Exporter {
constructor(objectName, callback) { constructor(objectName) {
this.objectName = objectName; this.objectName = objectName;
this.callback = callback;
this.dstDir = `${objectName}s`; this.dstDir = `${objectName}s`;
}
const templateDir = `${__dirname}/exporters/${objectName}`; async init() {
this.query = fs.readFileSync(`${templateDir}.sql`, 'utf8'); const templateDir = `${__dirname}/exporters/${this.objectName}`;
this.query = await fs.readFile(`${templateDir}.sql`, 'utf8');
const templateFile = fs.readFileSync(`${templateDir}.ejs`, 'utf8'); const templateFile = await fs.readFile(`${templateDir}.ejs`, 'utf8');
this.template = ejs.compile(templateFile); this.template = ejs.compile(templateFile);
if (fs.existsSync(`${templateDir}.js`)) if (await fs.pathExists(`${templateDir}.js`))
this.formatter = require(`${templateDir}.js`); this.formatter = require(`${templateDir}.js`);
} }
async export(conn, exportDir, schema) { async export(conn, exportDir, schema) {
const res = await conn.execute(this.query, [schema]); const [res] = await conn.query(this.query, [schema]);
if (!res[0].length) return; if (!res.length) return;
const routineDir = `${exportDir}/${schema}/${this.dstDir}`; const routineDir = `${exportDir}/${schema}/${this.dstDir}`;
if (!fs.existsSync(routineDir)) if (!await fs.pathExists(routineDir))
fs.mkdirSync(routineDir); await fs.mkdir(routineDir);
for (let params of res[0]) { for (const params of res) {
if (this.formatter) if (this.formatter)
this.formatter(params, schema) this.formatter(params, schema)
params.schema = schema; params.schema = schema;
let sql = this.template(params); let sql = this.template(params);
fs.writeFileSync(`${routineDir}/${params.name}.sql`, sql); await fs.writeFile(`${routineDir}/${params.name}.sql`, sql);
} }
} }
} }
@ -46,35 +71,7 @@ const exporters = [
new Exporter('event') new Exporter('event')
]; ];
// Exports objects for all schemas module.exports = Pull;
module.exports = async function main(workspace, schemas, dbConf) { if (require.main === module)
const conn = await mysql.createConnection(dbConf); new MyVC().run(Pull);
conn.queryFromFile = function(file, params) {
return this.execute(
fs.readFileSync(`${file}.sql`, 'utf8'),
params
);
}
try {
const exportDir = `${workspace}/routines`;
if (fs.existsSync(exportDir))
fs.removeSync(exportDir, {recursive: true});
fs.mkdirSync(exportDir);
for (let schema of schemas) {
let schemaDir = `${exportDir}/${schema}`;
if (!fs.existsSync(schemaDir))
fs.mkdirSync(schemaDir);
for (let exporter of exporters)
await exporter.export(conn, exportDir, schema);
}
} catch(err) {
console.error(err);
} finally {
await conn.end();
}
};

419
myvc-push.js Normal file
View File

@ -0,0 +1,419 @@
const MyVC = require('./index');
const fs = require('fs-extra');
const typeMap = {
events: {
name: 'EVENT',
abbr: 'EVNT',
color: 'cyan'
},
functions: {
name: 'FUNCTION',
abbr: 'FUNC',
color: 'cyan'
},
procedures: {
name: 'PROCEDURE',
abbr: 'PROC',
color: 'yellow'
},
triggers: {
name: 'TRIGGER',
abbr: 'TRIG',
color: 'blue'
},
views: {
name: 'VIEW',
abbr: 'VIEW',
color: 'magenta'
},
};
class Routine {
construct(path, mark) {
const path = path
const split = path.split('/');
const fullPath = `${this.opts.workspace}/routines/${path}.sql`;
const schema = split[0];
const type = typeMap[split[1]];
const name = split[2];
Object.assign(this, {
path,
mark: mark,
exists: await fs.pathExists(fullPath),
type,
schema,
name,
fullName: `${schema}.${name}`,
isRoutine: ['FUNC', 'PROC'].indexOf(type.abbr) !== -1
});
}
}
const tokens = {
string: {
start: '\'',
end: '\'',
escape: char => char == '\'' || char == '\\'
},
id: {
start: '`',
end: '`',
escape: char => char == '`'
},
multiComment: {
start: '/*',
end: '*/',
escape: () => false
},
singleComment: {
start: '-- ',
end: '\n',
escape: () => false
}
};
const tokenIndex = new Map();
for (const tokenId in tokens) {
const token = tokens[tokenId];
tokenIndex.set(token.start[0], token);
}
class Push {
get myOpts() {
return {
alias: {
force: 'f',
user: 'u',
applyUncommited: 'a'
}
};
}
async run(myvc, opts) {
const conn = await myvc.dbConnect();
this.conn = conn;
const version = await myvc.fetchDbVersion() || {};
console.log(
`Database information:`
+ `\n -> Version: ${version.number}`
+ `\n -> Commit: ${version.gitCommit}`
);
if (!/^[0-9]*$/.test(version.number))
throw new Error('Wrong database version');
if (!version.number)
version.number = '00000';
if (opts.user) {
const [[user]] = conn.query(
`SELECT LEFT(USER(), INSTR(USER(), '@') - 1)`
);
let [[userVersion]] = await conn.query(
`SELECT number, gitCommit
FROM versionUser
WHERE code = ? AND user = ?`,
[opts.code, user]
);
userVersion = userVersion || {};
console.log(
`User information:`
+ `\n -> User: ${user}`
+ `\n -> Version: ${userVersion.number}`
+ `\n -> Commit: ${userVersion.gitCommit}`
);
if (userVersion.number > version.number)
version = userVersion;
}
if (opts.env == 'production') {
console.log(
'\n ( ( ) ( ( ) ) '
+ '\n )\\ ))\\ ) ( /( )\\ ) ( ))\\ ) ( /( ( /( '
+ '\n(()/(()/( )\\()|()/( ( )\\ ) /(()/( )\\()) )\\())'
+ '\n /(_))(_)|(_)\\ /(_)) )\\ (((_) ( )(_))(_)|(_)\\ ((_)\\ '
+ '\n(_))(_)) ((_|_))_ _ ((_))\\___(_(_()|__)) ((_) _((_)'
+ '\n| _ \\ _ \\ / _ \\| \\| | | ((/ __|_ _|_ _| / _ \\| \\| |'
+ '\n| _/ /| (_) | |) | |_| || (__ | | | | | (_) | . |'
+ '\n|_| |_|_\\ \\___/|___/ \\___/ \\___| |_| |___| \\___/|_|\\_|'
+ '\n'
);
if (!opts.force) {
const readline = require('readline');
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
const answer = await new Promise(resolve => {
rl.question('Are you sure? (Default: no) [yes|no] ', resolve);
});
rl.close();
if (answer !== 'yes')
throw new Error('Changes aborted');
}
}
console.log('Applying versions.');
let nChanges = 0;
const versionsDir = `${opts.workspace}/versions`;
function logVersion(type, version, name) {
console.log('', type.bold, `[${version.bold}]`, name);
}
if (await fs.pathExists(versionsDir)) {
const versionDirs = await fs.readdir(versionsDir);
for (const versionDir of versionDirs) {
if (versionDir == 'README.md')
continue;
const match = versionDir.match(/^([0-9]{5})-([a-zA-Z0-9]+)?$/);
if (!match) {
logVersion('[W]'.yellow, '?????', versionDir);
continue;
}
const dirVersion = match[1];
const versionName = match[2];
if (version.number >= dirVersion) {
logVersion('[I]'.blue, dirVersion, versionName);
continue;
}
logVersion('[+]'.green, dirVersion, versionName);
const scriptsDir = `${versionsDir}/${versionDir}`;
const scripts = await fs.readdir(scriptsDir);
for (const script of scripts) {
if (!/^[0-9]{2}-[a-zA-Z0-9_]+\.sql$/.test(script)) {
console.log(` - Ignoring wrong file name: ${script}`);
continue;
}
console.log(` - ${script}`);
await this.queryFromFile(pushConn, `${scriptsDir}/${script}`);
nChanges++;
}
//await this.updateVersion(nChanges, 'number', dirVersion);
}
}
const pushConn = await myvc.createConnection();
console.log('Applying changed routines.');
let nRoutines = 0;
let changes = await fs.pathExists(`${opts.workspace}/.git`)
? await myvc.changedRoutines(version.gitCommit)
: await myvc.cachedChanges();
changes = await this.parseChanges(changes);
await conn.query(
`CREATE TEMPORARY TABLE tProcsPriv
ENGINE = MEMORY
SELECT * FROM mysql.procs_priv LIMIT 0`
);
const routines = [];
for (const change of changes)
if (change.isRoutine)
routines.push([change.schema, change.name]);
if (routines.length) {
await conn.query(
`DROP TEMPORARY TABLE IF EXISTS tProcsPriv`
);
await conn.query(
`CREATE TEMPORARY TABLE tProcsPriv
ENGINE = MEMORY
SELECT * FROM mysql.procs_priv
WHERE (Db, Routine_name) IN (?)`,
[routines]
);
}
for (const change of changes) {
const actionMsg = change.exists ? '[+]'.green : '[-]'.red;
const typeMsg = `[${change.type.abbr}]`[change.type.color];
console.log('', actionMsg.bold, typeMsg.bold, change.fullName);
if (change.exists)
await this.queryFromFile(pushConn, `routines/${change.path}.sql`);
else {
const escapedName =
conn.escapeId(change.schema, true) + '.' +
conn.escapeId(change.name, true);
const query = `DROP ${change.type.name} IF EXISTS ${escapedName}`;
await conn.query(query);
}
nRoutines++;
}
if (routines.length) {
await conn.query(
`INSERT IGNORE INTO mysql.procs_priv
SELECT * FROM tProcsPriv`
);
await conn.query(
`DROP TEMPORARY TABLE tProcsPriv`
);
}
await pushConn.end();
if (nRoutines > 0) {
await conn.query('FLUSH PRIVILEGES');
//await this.updateVersion(nRoutines, 'gitCommit', version.gitCommit);
console.log(` -> ${nRoutines} routines have changed.`);
} else
console.log(` -> No routines changed.`);
}
async parseChanges(changes) {
const routines = [];
for (const change of changes)
routines.push(new Routine(change));
return routines;
}
async updateVersion(nChanges, column, value) {
if (nChanges == 0) return;
const {opts} = this;
column = this.conn.escapeId(column, true);
if (opts.user) {
await this.conn.query(
`INSERT INTO versionUser
SET code = ?,
user = ?,
${column} = ?
ON DUPLICATE KEY UPDATE
${column} = VALUES(${column})`,
[
opts.code,
user,
value
]
);
} else {
await this.conn.query(
`INSERT INTO version
SET code = ?,
${column} = ?
ON DUPLICATE KEY UPDATE
${column} = VALUES(${column})`,
[
opts.code,
value
]
);
}
}
/**
* Executes an SQL script.
*
* @param {String} file Path to the SQL script
* @returns {Array<Result>} The resultset
*/
async queryFromFile(conn, file) {
let results = [];
const stmts = this.querySplit(await fs.readFile(file, 'utf8'));
for (const stmt of stmts)
results = results.concat(await conn.query(stmt));
return results;
}
/**
* Splits an SQL muti-query into a single-query array, it does an small
* parse to correctly handle the DELIMITER statement.
*
* @param {Array<String>} stmts The splitted SQL statements
*/
querySplit(sql) {
const stmts = [];
let i,
char,
token,
escaped,
stmtStart;
let delimiter = ';';
const delimiterRe = /\s*delimiter\s+(\S+)[^\S\r\n]*(?:\r?\n|\r)/yi;
function begins(str) {
let j;
for (j = 0; j < str.length; j++)
if (sql[i + j] != str[j])
return false;
i += j;
return true;
}
for (i = 0; i < sql.length;) {
stmtStart = i;
delimiterRe.lastIndex = i;
const match = sql.match(delimiterRe);
if (match) {
delimiter = match[1];
i += match[0].length;
continue;
}
while (i < sql.length) {
char = sql[i];
if (token) {
if (!escaped && begins(token.end))
token = null;
else {
escaped = !escaped && token.escape(char);
i++;
}
} else {
if (begins(delimiter)) break;
const tok = tokenIndex.get(char);
if (tok && begins(tok.start))
token = tok;
else
i++;
}
}
const len = i - stmtStart - delimiter.length;
stmts.push(sql.substr(stmtStart, len));
}
const len = stmts.length;
if (len > 1 && /^\s*$/.test(stmts[len - 1]))
stmts.pop();
return stmts;
}
}
module.exports = Push;
// Run as a standalone CLI command when this file is executed directly.
if (require.main === module)
    new MyVC().run(Push);

View File

@ -1,394 +0,0 @@
#!/bin/bash

# Default flags and workspace location.
FORCE=FALSE
IS_USER=FALSE
APPLY_UNCOMMITED=FALSE
WORKSPACE="$PWD"

# Prints an error message to stderr and aborts the script.
error() {
    echo "[ERR] $1" >&2
    exit 1
}

# Prints a warning message to stderr.
warn() {
    echo "[WAR] $1" >&2
}

# Prints an informational message to stdout.
log() {
    echo "[LOG] $1"
}
# Parse command line options.
while getopts ":fuae:" option
do
    case "$option" in
        f)  # Skip the production confirmation prompt
            FORCE=TRUE
            ;;
        u)  # Track versions per database user
            IS_USER=TRUE
            ;;
        a)  # Allow pushing with uncommited changes
            APPLY_UNCOMMITED=TRUE
            ;;
        e)  # Target environment name
            ENV="$OPTARG"
            ;;
        \?|:)
            error "Usage: $0 [-f] [-u] [-a] [-e environment]"
            ;;
    esac
done
shift $(($OPTIND - 1))
# Load workspace configuration
CONFIG_FILE="myvc.config.json"
if [ ! -f "$CONFIG_FILE" ]; then
    error "Config file not found: $CONFIG_FILE"
fi

# Directory where this script and its bundled files live
DIR="$(dirname "${BASH_SOURCE[0]}")"
CODE=$(jq -r ".code" "$CONFIG_FILE")

# Load database configuration: bundled local ini or a named remote
if [ -z "$ENV" ]; then
    INI_FILE="$DIR/db.ini"
else
    INI_FILE="$WORKSPACE/remotes/$ENV.ini"
fi
if [ ! -f "$INI_FILE" ]; then
    error "Database config file not found: $INI_FILE"
fi

log "Using config file: $INI_FILE"

# Fail fast when the database is unreachable
echo "SELECT 1;" | mysql --defaults-file="$INI_FILE" >> /dev/null
if [ "$?" -ne "0" ]; then
    error "Cannot connect to database."
fi

# Fetch git information
if [ ! -d "$WORKSPACE/.git" ]; then
    error "Git directory not initialized."
fi

COMMIT_SHA=$(git rev-parse HEAD)
if [ "$?" -ne "0" ]; then
    error "Cannot fetch Git HEAD."
fi

log "HEAD: $COMMIT_SHA"

# Detect staged, unstaged and untracked changes in the working tree
git diff-index --quiet --cached HEAD --
STAGED=$?
git diff-files --quiet
CHANGED=$?
UNTRACKED=`git ls-files --others --exclude-standard`

if [ "$STAGED" == "1" ] || [ "$CHANGED" == "1" ] || [ -n "$UNTRACKED" ]; then
    if [ "$APPLY_UNCOMMITED" == "TRUE" ]; then
        warn "You are applying uncommited changes."
    else
        error "You have uncommited changes, commit them before pushing or use -a option."
    fi
fi
# Query functions

# Runs a query and stores its raw output into the RETVAL global.
dbQuery() {
    local SQL=$1
    local SCHEMA=$2
    RETVAL=`echo "$SQL" | mysql --defaults-file="$INI_FILE" --silent --raw "$SCHEMA"`
}

# Runs a query discarding its output.
dbExec() {
    local SQL=$1
    local SCHEMA=$2
    echo "$SQL" | mysql --defaults-file="$INI_FILE" "$SCHEMA"
}

# Runs an SQL script file against the given schema.
dbExecFromFile() {
    local FILE_PATH=$1
    local SCHEMA=$2
    # FIX: quote the path so file names with spaces or glob characters
    # don't break on word splitting
    mysql --defaults-file="$INI_FILE" --default-character-set=utf8 --comments "$SCHEMA" < "$FILE_PATH"
}
# Fetch database version

# Schema that holds the version tracking tables, 'myvc' by default
VERSION_SCHEMA=$(jq -r ".versionSchema" "$CONFIG_FILE")
if [ "$VERSION_SCHEMA" == "null" ]; then
    VERSION_SCHEMA="myvc"
fi

# Check whether the version table already exists
read -r -d '' SQL << EOM
    SELECT COUNT(*)
        FROM information_schema.tables
        WHERE TABLE_SCHEMA = '$VERSION_SCHEMA'
            AND TABLE_NAME = 'version'
EOM

dbQuery "$SQL"
TABLE_EXISTS=$RETVAL

SCHEMA="\`$VERSION_SCHEMA\`"

# Create the version tables on first run
if [ "$TABLE_EXISTS" -eq "0" ]; then
    dbExec "CREATE DATABASE IF NOT EXISTS $SCHEMA"
    dbExecFromFile "$DIR/structure.sql" "$VERSION_SCHEMA"
    log "Version tables created into $SCHEMA schema."
fi

# Read the version and commit currently applied to the database
dbQuery "SELECT number, gitCommit FROM $SCHEMA.version WHERE code = '$CODE'"
RETVAL=($RETVAL)
DB_VERSION=${RETVAL[0]}
DB_COMMIT=${RETVAL[1]}

log "Database information:"
log " -> Version: $DB_VERSION"
log " -> Commit: $DB_COMMIT"

# The stored version must be numeric (or empty on a fresh database)
if [[ ! "$DB_VERSION" =~ ^[0-9]*$ ]]; then
    error "Wrong database version."
fi
if [ -z "$DB_VERSION" ]; then
    DB_VERSION=00000
fi
# When tracking per user, the user's own version may be ahead of the
# global database version; use the most advanced of both
if [ "$IS_USER" == "TRUE" ]; then
    log "User information:"

    # Current connection user, without the host part
    dbQuery "SELECT LEFT(USER(), INSTR(USER(), '@') - 1)"
    DB_USER=$RETVAL
    log " -> Name: $DB_USER"

    dbQuery "SELECT number, gitCommit FROM $SCHEMA.versionUser WHERE code = '$CODE' AND user = '$DB_USER'"
    RETVAL=($RETVAL)
    USER_VERSION=${RETVAL[0]}
    USER_COMMIT=${RETVAL[1]}

    log " -> Version: $USER_VERSION"
    log " -> Commit: $USER_COMMIT"

    if [ ! -z "$USER_VERSION" ]; then
        if [ "$USER_VERSION" -gt "$DB_VERSION" ]; then
            DB_VERSION=$USER_VERSION
            DB_COMMIT=$USER_COMMIT
        fi
    fi
fi
# Production protection: show a warning banner and ask for interactive
# confirmation, unless the -f (force) option was given
if [ "$ENV" == "production" ]; then
    echo ""
    echo " (          (       )  (       (   (  (      )       (       "
    echo " )\ ))\ )   ( /(  ( /( )\ )   )\ ))\ )(     ( /(  (   )\ )    "
    echo "(()/(()/(   )\()) )\()|()/(  (()/(()/(  )\()) )\ (()/(    "
    echo " /(_))(_)| ((_)\ /(_)) )\   (((_) (   )(_))(_)|(_)\  ((_)\   "
    echo "(_))(_))    ((_|_))_ _ ((_))\___(_(_()|__)) ((_)  _((_)"
    echo "| _ \ _ \  / _ \|   \| | | ((/ __|_   _|_ _| / _ \| \| |"
    echo "|  _/   / | (_) | |) | |_| || (__  | |  | | | (_) | .  |"
    echo "|_| |_|_\  \___/|___/ \___/  \___| |_| |___| \___/|_|\_|"
    echo ""

    if [ "$FORCE" != "TRUE" ]; then
        # Only an explicit 'yes' proceeds; anything else aborts
        read -p "[INT] Are you sure? (Default: no) [yes|no]: " ANSWER
        if [ "$ANSWER" != "yes" ]; then
            log "Aborting changes."
            exit
        fi
    fi
fi
# Apply versions

N_CHANGES=0
VERSIONS_DIR="$WORKSPACE/versions"

if [ -d "$VERSIONS_DIR" ]; then
    LAST_APPLIED_VERSION=$DB_VERSION

    for DIR_PATH in "$VERSIONS_DIR/"*; do
        DIR_NAME=$(basename $DIR_PATH)
        # Version directories are named NNNNN[-description]
        DIR_VERSION=${DIR_NAME:0:5}

        if [ "$DIR_NAME" == "README.md" ]; then
            continue
        fi
        if [[ ! "$DIR_NAME" =~ ^[0-9]{5}(-[a-zA-Z0-9]+)?$ ]]; then
            warn "Ignoring wrong directory name: $DIR_NAME"
            continue
        fi
        # Skip versions at or below the current database version
        if [ "$DB_VERSION" -ge "$DIR_VERSION" ]; then
            log "Ignoring already applied version: $DIR_NAME"
            continue
        fi

        log "Applying version: $DIR_NAME"

        for FILE in "$DIR_PATH/"*; do
            FILE_NAME=$(basename "$FILE")

            # An unexpanded glob means the directory is empty
            if [ "$FILE_NAME" == "*" ]; then
                continue
            fi
            # Version scripts are named NN-description.sql
            if [[ ! "$FILE_NAME" =~ ^[0-9]{2}-[a-zA-Z0-9_]+\.sql$ ]]; then
                warn "Ignoring wrong file name: $FILE_NAME"
                continue
            fi

            log " -> $FILE_NAME"
            dbExecFromFile "$FILE"
            N_CHANGES=$((N_CHANGES + 1))
        done

        LAST_APPLIED_VERSION=$DIR_VERSION
    done
fi
# Apply routines

# Applies (or drops) every routine file listed by the given command.
# Expected paths look like: routines/<schema>/<type>/<name>.sql
applyRoutines() {
    FILES_CMD=$1

    for FILE_PATH in `$FILES_CMD`; do
        FILE_NAME=$(basename $FILE_PATH)

        if [[ ! "$FILE_PATH" =~ ^routines/ ]]; then
            continue
        fi
        if [[ ! "$FILE_NAME" =~ ^[a-zA-Z0-9_]+\.sql$ ]]; then
            warn "Ignoring wrong file name: $FILE_NAME"
            continue
        fi

        # Split the relative path into schema / type / name
        FILE_REL_PATH=${FILE_PATH//routines\/}
        IFS='/' read -ra SPLIT <<< "$FILE_REL_PATH"
        SCHEMA=${SPLIT[0]}
        NAME=${SPLIT[2]}
        NAME=${NAME//\.sql/}

        # Map the directory name to the SQL object type
        ROUTINE_TYPE=${SPLIT[1]}
        case "$ROUTINE_TYPE" in
            events)
                ROUTINE_TYPE=EVENT
                ;;
            functions)
                ROUTINE_TYPE=FUNCTION
                ;;
            procedures)
                ROUTINE_TYPE=PROCEDURE
                ;;
            triggers)
                ROUTINE_TYPE=TRIGGER
                ;;
            views)
                ROUTINE_TYPE=VIEW
                ;;
            *)
                warn "Ignoring unknown routine type: $ROUTINE_TYPE"
                continue
                ;;
        esac

        ROUTINE_NAME="\`$SCHEMA\`.\`$NAME\`"

        # A file missing from disk means the routine was deleted in git
        if [[ -f "$FILE_PATH" ]]; then
            ACTION="REPLACE"
        else
            ACTION="DROP"
        fi

        log " -> $ACTION: $ROUTINE_TYPE $ROUTINE_NAME"

        if [ "$ACTION" == "REPLACE" ]; then
            dbExecFromFile "$FILE_PATH" "$SCHEMA"
        else
            dbExec "DROP $ROUTINE_TYPE IF EXISTS $ROUTINE_NAME"
        fi

        ROUTINES_CHANGED=$((ROUTINES_CHANGED + 1))
    done
}
ROUTINES_CHANGED=0
ROUTINES_DIR="$WORKSPACE/routines"

if [ -d "$ROUTINES_DIR" ]; then
    log "Applying changed routines."

    # Back up routine privileges: re-creating a routine drops its grants
    PROCS_FILE=.procs-priv.sql
    mysqldump \
        --defaults-file="$INI_FILE" \
        --no-create-info \
        --skip-triggers \
        --insert-ignore \
        mysql procs_priv > "$PROCS_FILE"

    # With no known commit apply everything, otherwise only the files
    # git reports as changed (deleted first, then modified/added)
    if [ -z "$DB_COMMIT" ]; then
        applyRoutines "find routines -type f"
    else
        applyRoutines "git diff --name-only --diff-filter=D $DB_COMMIT -- routines"
        applyRoutines "git diff --name-only --diff-filter=d $DB_COMMIT -- routines"
    fi

    if [ "$ROUTINES_CHANGED" -gt "0" ]; then
        # Restore the saved privileges for the re-created routines
        dbExecFromFile "$PROCS_FILE" "mysql"
        if [ "$?" -eq "0" ]; then
            dbExec "FLUSH PRIVILEGES"
            rm "$PROCS_FILE"
        else
            # FIX: typo 'ocurred' -> 'occurred'
            warn "An error occurred when restoring routine privileges, backup saved at $PROCS_FILE"
        fi
        log " -> $ROUTINES_CHANGED routines have changed."
    else
        log " -> No routines changed."
        rm "$PROCS_FILE"
    fi
fi
N_CHANGES=$((N_CHANGES + ROUTINES_CHANGED))

# Display summary and record the new version mark in the database
if [ "$N_CHANGES" -gt "0" ]; then
    if [ "$IS_USER" == "TRUE" ]; then
        SQL=(
            "INSERT INTO $SCHEMA.versionUser SET "
                "code = '$CODE', "
                "user = '$DB_USER', "
                "number = '$LAST_APPLIED_VERSION', "
                "gitCommit = '$COMMIT_SHA' "
            "ON DUPLICATE KEY UPDATE "
                "number = VALUES(number), "
                "gitCommit = VALUES(gitCommit)"
        )
    else
        SQL=(
            "INSERT INTO $SCHEMA.version SET "
                "code = '$CODE', "
                "number = '$LAST_APPLIED_VERSION', "
                "gitCommit = '$COMMIT_SHA' "
            "ON DUPLICATE KEY UPDATE "
                "number = VALUES(number), "
                "gitCommit = VALUES(gitCommit)"
        )
    fi
    dbExec "${SQL[*]}"
    # FIX: typo 'succesfully' -> 'successfully'
    log "Changes applied successfully."
else
    log "No changes applied."
fi

112
myvc-run.js Normal file
View File

@ -0,0 +1,112 @@
const MyVC = require('./index');
const docker = require('./docker');
const fs = require('fs-extra');
const Server = require('./server/server');
/**
 * Builds the database image and runs a container. It only rebuilds the
 * image when fixtures have been modified or when the day on which the
 * image was built is different to today. Some workarounds have been used
 * to avoid a bug with OverlayFS driver on MacOS.
 */
class Run {
    get myOpts() {
        // Short aliases for the command line options.
        return {
            alias: {
                ci: 'c',
                random: 'r'
            }
        };
    }

    /**
     * Runs the database container, building the image first when needed.
     *
     * @param {MyVC} myvc The MyVC application instance
     * @param {Object} opts Resolved command options (ci, random, code,
     * workspace...)
     */
    async run(myvc, opts) {
        // FIX: 'path' is used below but was never imported at file
        // level, which threw a ReferenceError at runtime.
        const path = require('path');

        const server = new Server(opts.code, opts.workspace);
        // NOTE(review): Server (server/server.js) does not declare a
        // run() method, only wait() and rm() — confirm this call.
        await server.run();

        const dumpDir = `${opts.workspace}/dump`;
        const dumpInfo = `${dumpDir}/.dump.json`;

        if (await fs.pathExists(dumpInfo)) {
            // FIX: use the async readFile; readFileSync returns
            // synchronously so the previous 'await' was a no-op.
            const version = JSON.parse(
                await fs.readFile(dumpInfo, 'utf8')
            );

            const fd = await fs.open(`${dumpDir}/.changes`, 'w+');
            const changes = await myvc.changedRoutines(version.gitCommit);

            // FIX: await each write; they were floating promises so the
            // file could be closed before the writes finished.
            for (const change of changes)
                await fs.write(fd, change.mark + change.path + '\n');

            await fs.close(fd);
        }

        const dockerfilePath = path.join(__dirname, 'server', 'Dockerfile');
        await docker.build(__dirname, {
            tag: 'myvc/server',
            file: `${dockerfilePath}.server`
        });

        // Date stamp build argument, forces a daily image rebuild.
        const today = new Date();
        const pad = v => v < 10 ? '0' + v : v;
        const year = today.getFullYear();
        const month = pad(today.getMonth() + 1);
        const day = pad(today.getDate());
        const stamp = `${year}-${month}-${day}`;

        // NOTE(review): this.imageTag, this.isRandom, this.name,
        // this.dbConf, this.rm() and this.wait() are never defined on
        // Run — they look like Server members; confirm the intended
        // receiver.
        await docker.build(__dirname, {
            tag: this.imageTag,
            file: `${dockerfilePath}.dump`,
            buildArg: `STAMP=${stamp}`
        });

        let runOptions;

        if (this.isRandom)
            runOptions = {publish: '3306'};
        else {
            runOptions = {
                name: this.name,
                publish: `3306:${this.dbConf.port}`
            };
            // Best effort: remove any previous container with this name.
            try {
                await this.rm();
            } catch (e) {}
        }

        // Ownership fixup is only needed on non-Linux Docker hosts.
        const runChown = process.platform != 'linux';

        Object.assign(runOptions, {
            env: `RUN_CHOWN=${runChown}`,
            detach: true
        });

        const ct = await docker.run(this.imageTag, null, runOptions);

        try {
            if (this.isRandom) {
                const netSettings = await ct.inspect({
                    filter: '{{json .NetworkSettings}}'
                });

                if (opts.ci)
                    this.dbConf.host = netSettings.Gateway;
                this.dbConf.port = netSettings.Ports['3306/tcp'][0]['HostPort'];
            }
            await this.wait();
        } catch (err) {
            // On failure, clean up anonymous containers before rethrowing.
            if (this.isRandom)
                await this.rm();
            throw err;
        }
    }
}
module.exports = Run;
// Run as a standalone CLI command when this file is executed directly.
if (require.main === module)
    new MyVC().run(Run);

42
myvc-start.js Normal file
View File

@ -0,0 +1,42 @@
const MyVC = require('./index');
const docker = require('./docker');
const Server = require('./server/server');
/**
 * Does the minimum effort to start the database container, if it doesn't
 * exist calls the run command, if it is started does nothing. Keep in
 * mind that when you do not rebuild the docker you may be using an outdated
 * version of it.
 */
class Start {
    async run(myvc, opts) {
        const server = new Server(opts.code, opts.workspace);
        // NOTE(review): Server (server/server.js) does not declare a
        // start() method, only wait() and rm() — confirm this call.
        await server.start();

        let status;
        try {
            // Query the current container state, throws when the
            // container does not exist.
            status = await docker.inspect(opts.code, {
                filter: '{{json .State.Status}}'
            });
        } catch (err) {
            // NOTE(review): this recurses into Start.run() without
            // arguments, so 'opts' is undefined on the second call and
            // it cannot terminate; presumably the Run command was meant
            // to be invoked here — confirm.
            return await this.run();
        }

        switch (status) {
            case 'running':
                // Already up, nothing to do.
                return;
            case 'exited':
                // Container exists but is stopped: start it and wait
                // for MySQL to accept connections.
                // NOTE(review): this.wait() is not defined on Start —
                // it looks like a Server member; confirm.
                await docker.start(opts.code);
                await this.wait();
                return;
            default:
                throw new Error(`Unknown docker status: ${status}`);
        }
    }
}
module.exports = Start;
// Run as a standalone CLI command when this file is executed directly.
if (require.main === module)
    new MyVC().run(Start);

7
myvc.default.yml Executable file
View File

@ -0,0 +1,7 @@
# Schema where the version tracking tables live.
versionSchema: myvc
# Schemas managed by MyVC.
schemas:
  - myvc
# Tables whose data is exported as fixtures, per schema.
fixtures:
  myvc:
    - version
    - versionUser

View File

@ -1,3 +1,4 @@
#!/usr/bin/env node #!/usr/bin/env node
require('./'); const MyVC = require('./');
new MyVC().run();

1164
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,13 +1,10 @@
{ {
"name": "myvc", "name": "myvc",
"version": "1.0.18", "version": "1.1.0",
"author": "Verdnatura Levante SL", "author": "Verdnatura Levante SL",
"description": "MySQL Version Control", "description": "MySQL Version Control",
"license": "GPL-3.0", "license": "GPL-3.0",
"bin": { "bin": "myvc.js",
"myvc": "myvc.js",
"myvc-push": "myvc-push.sh"
},
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://github.com/verdnatura/myvc.git" "url": "https://github.com/verdnatura/myvc.git"
@ -21,6 +18,7 @@
"getopts": "^2.2.5", "getopts": "^2.2.5",
"ini": "^1.3.5", "ini": "^1.3.5",
"mysql2": "^2.2.5", "mysql2": "^2.2.5",
"nodegit": "^0.27.0",
"require-yaml": "0.0.1" "require-yaml": "0.0.1"
}, },
"main": "index.js", "main": "index.js",

View File

@ -5,34 +5,51 @@ ENV TZ Europe/Madrid
ARG DEBIAN_FRONTEND=noninteractive ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y --no-install-recommends curl ca-certificates \ && apt-get install -y --no-install-recommends \
curl \
ca-certificates \
tzdata \
&& curl -sL https://apt.verdnatura.es/conf/verdnatura.gpg | apt-key add - \ && curl -sL https://apt.verdnatura.es/conf/verdnatura.gpg | apt-key add - \
&& echo "deb http://apt.verdnatura.es/ jessie main" > /etc/apt/sources.list.d/vn.list \ && echo "deb http://apt.verdnatura.es/ jessie main" > /etc/apt/sources.list.d/vn.list \
&& apt-get update \ && apt-get update \
&& apt-get install -y \ && apt-get install -y \
git \
jq \
vn-mariadb \ vn-mariadb \
&& apt-get purge -y --auto-remove curl ca-certificates \ && curl -sL https://deb.nodesource.com/setup_14.x | bash - \
&& apt-get install -y --no-install-recommends \
nodejs \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
COPY docker/docker.cnf /etc/mysql/conf.d/
COPY \
docker/docker-init.sh \
docker/docker-temp-start.sh \
docker/docker-temp-stop.sh \
docker/docker-dump.sh \
docker/docker-start.sh \
myvc-push.sh \
structure.sql \
db.ini \
/usr/local/bin/
RUN mkdir /mysql-data \ RUN mkdir /mysql-data \
&& chown -R mysql:mysql /mysql-data && chown -R mysql:mysql /mysql-data
WORKDIR /myvc
COPY \
package.json \
package-lock.json \
./
RUN npm install --only=prod
COPY \
structure.sql \
index.js \
myvc.js \
myvc.default.yml \
db.ini \
./
RUN ln -s /myvc/myvc.js /usr/local/bin/myvc
WORKDIR /workspace WORKDIR /workspace
COPY server/docker.cnf /etc/mysql/conf.d/
COPY \
server/docker-init.sh \
server/docker-temp-start.sh \
server/docker-temp-stop.sh \
server/docker-dump.sh \
server/docker-start.sh \
/usr/local/bin/
USER mysql USER mysql
ENTRYPOINT ["docker-start.sh"] ENTRYPOINT ["docker-start.sh"]

View File

@ -4,20 +4,26 @@ USER root
COPY \ COPY \
dump/.dump.sql \ dump/.dump.sql \
dump/structure.sql \ dump/structure.sql \
myvc.config.json \ dump/
COPY myvc.config.yml \
./ ./
RUN gosu mysql docker-init.sh \ RUN gosu mysql docker-init.sh \
&& docker-dump.sh dump/structure \ && docker-dump.sh dump/structure \
&& docker-dump.sh dump/.dump \ && docker-dump.sh dump/.dump \
&& gosu mysql docker-temp-stop.sh && gosu mysql docker-temp-stop.sh
COPY routines ./routines COPY routines routines
COPY versions ./versions COPY versions versions
COPY dump/fixtures.sql ./ COPY \
dump/fixtures.sql \
dump/.changes \
dump/
ARG STAMP=unknown ARG STAMP=unknown
RUN gosu mysql docker-temp-start.sh \ RUN gosu mysql docker-temp-start.sh \
&& myvc-push.sh -a \ && myvc push \
&& docker-dump.sh dump/fixtures \ && docker-dump.sh dump/fixtures \
&& gosu mysql docker-temp-stop.sh && gosu mysql docker-temp-stop.sh

View File

@ -2,8 +2,8 @@
FILE="$1.sql" FILE="$1.sql"
if [ -f "$FILE" ]; then #if [ -f "$FILE" ]; then
echo "[LOG] -> Importing $FILE" echo "[LOG] -> Importing $FILE"
export MYSQL_PWD=root export MYSQL_PWD=root
mysql -u root --default-character-set=utf8 --comments -f < "$FILE" mysql -u root --default-character-set=utf8 --comments -f < "$FILE"
fi #fi

81
server/server.js Normal file
View File

@ -0,0 +1,81 @@
const log = require('fancy-log');
const path = require('path');
const docker = require('../docker');
module.exports = class Server {
constructor(name, context) {
Object.assign(this, {
id: name,
name,
isRandom: name == null,
dbConf: {
host: 'localhost',
port: '3306',
username: 'root',
password: 'root'
},
imageTag: name || 'myvc/dump',
context
});
}
wait() {
return new Promise((resolve, reject) => {
const mysql = require('mysql2');
let interval = 100;
let elapsedTime = 0;
let maxInterval = 4 * 60 * 1000;
let myConf = {
user: this.dbConf.username,
password: this.dbConf.password,
host: this.dbConf.host,
port: this.dbConf.port
};
log('Waiting for MySQL init process...');
async function checker() {
elapsedTime += interval;
let status;
try {
status = await docker.inspect(this.id, {
filter: '{{json .State.Status}}'
});
} catch (err) {
return reject(new Error(err.message));
}
if (status === 'exited')
return reject(new Error('Docker exited, please see the docker logs for more info'));
let conn = mysql.createConnection(myConf);
conn.on('error', () => {});
conn.connect(err => {
conn.destroy();
if (!err) {
log('MySQL process ready.');
return resolve();
}
if (elapsedTime >= maxInterval)
reject(new Error(`MySQL not initialized whithin ${elapsedTime / 1000} secs`));
else
setTimeout(bindedChecker, interval);
});
}
let bindedChecker = checker.bind(this);
bindedChecker();
});
}
async rm() {
try {
await docker.stop(this.id);
await docker.rm(this.id, {volumes: true});
} catch (e) {}
}
};

11
template/myvc.config.yml Executable file
View File

@ -0,0 +1,11 @@
# Unique code identifying this database project.
code: my-db
# Schemas managed by MyVC.
schemas:
  - myvc
  - my_db
# Tables whose data is exported as fixtures, per schema.
fixtures:
  myvc:
    - version
    - versionUser
  my_db:
    - table1
    - table2

View File

@ -1,17 +0,0 @@
{
"code": "my-db",
"schemas": [
"myvc",
"my_db"
],
"fixtures": {
"myvc": [
"version",
"versionUser"
],
"my_db": [
"table1",
"table2"
]
}
}