Refactor and error fixes

Juan Ferrer 2020-12-04 10:15:29 +01:00
parent 9d71ee7d23
commit 598ecbc535
11 changed files with 181 additions and 199 deletions

View File

@@ -79,7 +79,7 @@ const docker = {
         child.on('exit', code => {
             if (code !== 0) {
                 const args = JSON.stringify(execArgs);
-                reject(new Error(`'docker' ${args}: Exit code: ${code}`));
+                reject(new Error(`docker: ${args}: exit code ${code}`));
             } else
                 resolve(code);
         });
@@ -96,7 +96,9 @@ const docker = {
 };
 class Container {
-    construct(id) {
+    constructor(id) {
+        if (!id)
+            throw new Error('Container id argument is required');
         this.id = id;
     }
@@ -123,3 +125,4 @@ function camelToSnake(str) {
 }
 module.exports = docker;
+module.exports.Container = Container;
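A minimal usage sketch of the guarded constructor and the new named export; the containerStatus helper below is illustrative only and assumes the inspect() wrapper used later in myvc-start.js:

    const {Container} = require('./docker');

    // Fails early instead of producing a container bound to an undefined id.
    async function containerStatus(code) {
        const ct = new Container(code);
        return await ct.inspect({
            format: '{{json .State.Status}}'
        });
    }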

View File

@@ -21,6 +21,7 @@ class MyVC {
             alias: {
                 env: 'e',
                 workspace: 'w',
+                socket: 's',
                 debug: 'd',
                 version: 'v',
                 help: 'h'
@@ -141,7 +142,7 @@ class MyVC {
                 rejectUnauthorized: iniConfig.ssl_verify_server_cert != undefined
             }
         }
-        if (!opts.env)
+        if (opts.socket)
            dbConfig.socketPath = '/var/run/mysqld/mysqld.sock';
        Object.assign(opts, {
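How the new --socket (-s) flag is meant to be consumed, as a hedged sketch; the connect() helper is hypothetical, but socketPath is the standard mysql2 option for UNIX-socket connections:

    const mysql = require('mysql2/promise');

    // Hypothetical helper: with --socket the client bypasses TCP and uses the
    // local MySQL socket, which is what "myvc push --socket" relies on while
    // the dump image is being built.
    async function connect(opts, dbConfig) {
        if (opts.socket)
            dbConfig.socketPath = '/var/run/mysqld/mysqld.sock';
        return await mysql.createConnection(dbConfig);
    }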

View File

@@ -2,8 +2,11 @@
 const MyVC = require('./index');
 const fs = require('fs-extra');
 const path = require('path');
-const docker = require('./docker');
+const docker = require('./docker').docker;
+/**
+ * Dumps structure and fixtures from remote.
+ */
 class Dump {
     get myOpts() {
         return {
@@ -36,7 +39,7 @@ class Dump {
         await docker.build(__dirname, {
             tag: 'myvc/client',
             file: path.join(__dirname, 'Dockerfile.client')
-        }, !!this.opts.debug);
+        }, opts.debug);
         let dumpArgs = [
             `--defaults-file=${opts.iniFile}`,

View File

@@ -1,94 +1,20 @@
 const MyVC = require('./index');
 const fs = require('fs-extra');
-const path = require('path');
-const typeMap = {
-    events: {
-        name: 'EVENT',
-        abbr: 'EVNT',
-        color: 'cyan'
-    },
-    functions: {
-        name: 'FUNCTION',
-        abbr: 'FUNC',
-        color: 'cyan'
-    },
-    procedures: {
-        name: 'PROCEDURE',
-        abbr: 'PROC',
-        color: 'yellow'
-    },
-    triggers: {
-        name: 'TRIGGER',
-        abbr: 'TRIG',
-        color: 'blue'
-    },
-    views: {
-        name: 'VIEW',
-        abbr: 'VIEW',
-        color: 'magenta'
-    },
-};
-class Routine {
-    construct(path, mark) {
-        const path = path
-        const split = path.split('/');
-        const fullPath = `${this.opts.workspace}/routines/${path}.sql`;
-        const schema = split[0];
-        const type = typeMap[split[1]];
-        const name = split[2];
-        Object.assign(this, {
-            path,
-            mark: mark,
-            exists: await fs.pathExists(fullPath),
-            type,
-            schema,
-            name,
-            fullName: `${schema}.${name}`,
-            isRoutine: ['FUNC', 'PROC'].indexOf(type.abbr) !== -1
-        });
-    }
-}
-const tokens = {
-    string: {
-        start: '\'',
-        end: '\'',
-        escape: char => char == '\'' || char == '\\'
-    },
-    id: {
-        start: '`',
-        end: '`',
-        escape: char => char == '`'
-    },
-    multiComment: {
-        start: '/*',
-        end: '*/',
-        escape: () => false
-    },
-    singleComment: {
-        start: '-- ',
-        end: '\n',
-        escape: () => false
-    }
-};
-const tokenIndex = new Map();
-for (const tokenId in tokens) {
-    const token = tokens[tokenId];
-    tokenIndex.set(token.start[0], token);
-}
+/**
+ * Pushes changes to remote.
+ *
+ * @property {Boolean} force Answer yes to all questions
+ * @property {Boolean} user Whether to change current user version
+ */
 class Push {
     get myOpts() {
         return {
             alias: {
                 force: 'f',
-                user: 'u',
-                applyUncommited: 'a'
+                user: 'u'
             }
         };
     }
@@ -218,7 +144,7 @@ class Push {
         let changes = await fs.pathExists(`${opts.workspace}/.git`)
             ? await myvc.changedRoutines(version.gitCommit)
             : await myvc.cachedChanges();
-        changes = await this.parseChanges(changes);
+        changes = this.parseChanges(changes);
         await conn.query(
             `CREATE TEMPORARY TABLE tProcsPriv
@@ -245,12 +171,14 @@ class Push {
         }
         for (const change of changes) {
-            const actionMsg = change.exists ? '[+]'.green : '[-]'.red;
+            const fullPath = `${opts.workspace}/routines/${change.path}.sql`;
+            const exists = await fs.pathExists(fullPath);
+            const actionMsg = exists ? '[+]'.green : '[-]'.red;
             const typeMsg = `[${change.type.abbr}]`[change.type.color];
             console.log('', actionMsg.bold, typeMsg.bold, change.fullName);
-            if (change.exists)
+            if (exists)
                 await this.queryFromFile(pushConn, `routines/${change.path}.sql`);
             else {
                 const escapedName =
@@ -285,7 +213,7 @@ class Push {
         console.log(` -> No routines changed.`);
     }
-    async parseChanges(changes) {
+    parseChanges(changes) {
         const routines = [];
         for (const change of changes)
             routines.push(new Routine(change));
@@ -413,6 +341,84 @@ class Push {
     }
 }
+const typeMap = {
+    events: {
+        name: 'EVENT',
+        abbr: 'EVNT',
+        color: 'cyan'
+    },
+    functions: {
+        name: 'FUNCTION',
+        abbr: 'FUNC',
+        color: 'cyan'
+    },
+    procedures: {
+        name: 'PROCEDURE',
+        abbr: 'PROC',
+        color: 'yellow'
+    },
+    triggers: {
+        name: 'TRIGGER',
+        abbr: 'TRIG',
+        color: 'blue'
+    },
+    views: {
+        name: 'VIEW',
+        abbr: 'VIEW',
+        color: 'magenta'
+    },
+};
+class Routine {
+    constructor(change) {
+        const path = change.path;
+        const split = path.split('/');
+        const schema = split[0];
+        const type = typeMap[split[1]];
+        const name = split[2];
+        Object.assign(this, {
+            path,
+            mark: change.mark,
+            type,
+            schema,
+            name,
+            fullName: `${schema}.${name}`,
+            isRoutine: ['FUNC', 'PROC'].indexOf(type.abbr) !== -1
+        });
+    }
+}
+const tokens = {
+    string: {
+        start: '\'',
+        end: '\'',
+        escape: char => char == '\'' || char == '\\'
+    },
+    id: {
+        start: '`',
+        end: '`',
+        escape: char => char == '`'
+    },
+    multiComment: {
+        start: '/*',
+        end: '*/',
+        escape: () => false
+    },
+    singleComment: {
+        start: '-- ',
+        end: '\n',
+        escape: () => false
+    }
+};
+const tokenIndex = new Map();
+for (const tokenId in tokens) {
+    const token = tokens[tokenId];
+    tokenIndex.set(token.start[0], token);
+}
 module.exports = Push;
 if (require.main === module)
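For reference, a sketch of what the relocated Routine class yields for a single change entry; the schema, type and name segments below are made up:

    // A change path follows <schema>/<type>/<name>.
    const routine = new Routine({path: 'myDb/procedures/myProc', mark: '+'});
    // routine.schema    -> 'myDb'
    // routine.type.abbr -> 'PROC'
    // routine.fullName  -> 'myDb.myProc'
    // routine.isRoutine -> true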

View File

@@ -2,6 +2,7 @@
 const MyVC = require('./index');
 const docker = require('./docker');
 const fs = require('fs-extra');
+const path = require('path');
 const Server = require('./server/server');
 /**
@@ -10,7 +11,8 @@ const Server = require('./server/server');
  * image was built is different to today. Some workarounds have been used
  * to avoid a bug with OverlayFS driver on MacOS.
  *
- * @param {Boolean} ci continuous integration environment argument
+ * @property {Boolean} ci Continuous integration environment
+ * @property {Boolean} random Whether to use a random container name
  */
 class Run {
     get myOpts() {
@@ -23,32 +25,39 @@ class Run {
     }
     async run(myvc, opts) {
-        const server = new Server(opts.code, opts.workspace);
-        await server.run();
         const dumpDir = `${opts.workspace}/dump`;
         const dumpInfo = `${dumpDir}/.dump.json`;
         if (await fs.pathExists(dumpInfo)) {
+            const cache = await myvc.cachedChanges();
             const version = JSON.parse(
                 await fs.readFileSync(dumpInfo, 'utf8')
             );
-            const fd = await fs.open(`${dumpDir}/.changes`, 'w+');
             const changes = await myvc.changedRoutines(version.gitCommit);
-            for (const change of changes)
-                fs.write(fd, change.mark + change.path + '\n');
-            await fs.close(fd);
+            let isEqual = false;
+            if (cache && changes && cache.length == changes.length)
+                for (let i = 0; i < changes.length; i++) {
+                    isEqual = cache[i].path == changes[i].path
+                        && cache[i].mark == changes[i].mark;
+                    if (!isEqual) break;
+                }
+            if (!isEqual) {
+                const fd = await fs.open(`${dumpDir}/.changes`, 'w+');
+                for (const change of changes)
+                    fs.write(fd, change.mark + change.path + '\n');
+                await fs.close(fd);
+            }
         }
         const dockerfilePath = path.join(__dirname, 'server', 'Dockerfile');
         await docker.build(__dirname, {
             tag: 'myvc/server',
-            file: `${dockerfilePath}.server`
-        });
+            file: dockerfilePath
+        }, opts.debug);
         const today = new Date();
         const pad = v => v < 10 ? '0' + v : v;
@@ -57,20 +66,23 @@ class Run {
         const day = pad(today.getDate());
         const stamp = `${year}-${month}-${day}`;
-        await docker.build(__dirname, {
-            tag: this.imageTag,
+        await docker.build(opts.workspace, {
+            tag: opts.code,
             file: `${dockerfilePath}.dump`,
             buildArg: `STAMP=${stamp}`
-        });
+        }, opts.debug);
+        const isRandom = opts.random;
+        const dbConfig = Object.assign({}, opts.dbConfig);
         let runOptions;
-        if (this.isRandom)
+        if (isRandom)
             runOptions = {publish: '3306'};
         else {
             runOptions = {
-                name: this.name,
-                publish: `3306:${this.dbConf.port}`
+                name: opts.code,
+                publish: `3306:${dbConfig.port}`
             };
             try {
                 await this.rm();
@@ -83,26 +95,28 @@ class Run {
             env: `RUN_CHOWN=${runChown}`,
             detach: true
         });
-        const ct = await docker.run(this.imageTag, null, runOptions);
+        const ct = await docker.run(opts.code, null, runOptions);
+        const server = new Server(ct, dbConfig);
         try {
-            if (this.isRandom) {
+            if (isRandom) {
                 const netSettings = await ct.inspect({
-                    filter: '{{json .NetworkSettings}}'
+                    format: '{{json .NetworkSettings}}'
                 });
                 if (opts.ci)
-                    this.dbConf.host = netSettings.Gateway;
-                this.dbConf.port = netSettings.Ports['3306/tcp'][0]['HostPort'];
+                    dbConfig.host = netSettings.Gateway;
+                dbConfig.port = netSettings.Ports['3306/tcp'][0].HostPort;
             }
-            await this.wait();
         } catch (err) {
-            if (this.isRandom)
-                await this.rm();
+            if (isRandom)
+                await server.rm();
             throw err;
         }
+        await server.wait();
+        return server;
     }
 }
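The intent of the new cache check, restated as a standalone sketch (the function name is illustrative): dump/.changes is rewritten only when the change list derived from git differs from the cached one.

    function sameChanges(cache, changes) {
        if (!cache || !changes || cache.length !== changes.length)
            return false;
        return changes.every((change, i) =>
            change.path === cache[i].path && change.mark === cache[i].mark);
    }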

View File

@@ -1,7 +1,8 @@
 const MyVC = require('./index');
-const docker = require('./docker');
+const Container = require('./docker').Container;
 const Server = require('./server/server');
+const Run = require('./myvc-run');
 /**
  * Does the minium effort to start the database container, if it doesn't
@@ -11,28 +12,31 @@ const Server = require('./server/server');
  */
 class Start {
     async run(myvc, opts) {
-        const server = new Server(opts.code, opts.workspace);
-        await server.start();
+        const ct = new Container(opts.code);
         let status;
         try {
-            status = await docker.inspect(opts.code, {
-                filter: '{{json .State.Status}}'
+            status = await ct.inspect({
+                format: '{{json .State.Status}}'
             });
         } catch (err) {
-            return await this.run();
+            const run = new Run()
+            return await run.run(myvc, opts);
         }
         switch (status) {
         case 'running':
-            return;
+            break;
         case 'exited':
-            await docker.start(opts.code);
-            await this.wait();
-            return;
+            await ct.start();
+            break;
         default:
            throw new Error(`Unknown docker status: ${status}`);
        }
+        const server = new Server(ct, opts.dbConfig);
+        await server.wait();
+        return server;
     }
 }

package-lock.json (generated)
View File

@@ -1,6 +1,6 @@
 {
   "name": "myvc",
-  "version": "1.0.18",
+  "version": "1.1.0",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
@@ -75,14 +75,6 @@
         "uri-js": "^4.2.2"
       }
     },
-    "ansi-gray": {
-      "version": "0.1.1",
-      "resolved": "https://registry.npmjs.org/ansi-gray/-/ansi-gray-0.1.1.tgz",
-      "integrity": "sha1-KWLPVOyXksSFEKPetSRDaGHvclE=",
-      "requires": {
-        "ansi-wrap": "0.1.0"
-      }
-    },
     "ansi-regex": {
       "version": "2.1.1",
       "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
@@ -96,11 +88,6 @@
         "color-convert": "^1.9.0"
       }
     },
-    "ansi-wrap": {
-      "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz",
-      "integrity": "sha1-qCJQ3bABXponyoLoLqYDu/pF768="
-    },
     "aproba": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz",
@@ -282,11 +269,6 @@
       "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
       "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU="
     },
-    "color-support": {
-      "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
-      "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg=="
-    },
     "colors": {
       "version": "1.4.0",
       "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz",
@@ -419,17 +401,6 @@
       "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
       "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU="
     },
-    "fancy-log": {
-      "version": "1.3.3",
-      "resolved": "https://registry.npmjs.org/fancy-log/-/fancy-log-1.3.3.tgz",
-      "integrity": "sha512-k9oEhlyc0FrVh25qYuSELjr8oxsCoc4/LEZfg2iJJrfEk/tZL9bCoJE47gqAvI2m/AUjluCS4+3I0eTx8n3AEw==",
-      "requires": {
-        "ansi-gray": "^0.1.1",
-        "color-support": "^1.1.3",
-        "parse-node-version": "^1.0.0",
-        "time-stamp": "^1.0.0"
-      }
-    },
     "fast-deep-equal": {
       "version": "3.1.3",
       "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
@@ -1106,11 +1077,6 @@
         "p-finally": "^1.0.0"
       }
     },
-    "parse-node-version": {
-      "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/parse-node-version/-/parse-node-version-1.0.1.tgz",
-      "integrity": "sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA=="
-    },
     "path-is-absolute": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
@@ -1393,11 +1359,6 @@
         "xtend": "^4.0.0"
       }
     },
-    "time-stamp": {
-      "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/time-stamp/-/time-stamp-1.1.0.tgz",
-      "integrity": "sha1-dkpaEa9QVhkhsTPztE5hhofg9cM="
-    },
     "to-buffer": {
       "version": "1.1.1",
       "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz",

View File

@@ -1,6 +1,6 @@
 {
   "name": "myvc",
-  "version": "1.1.0",
+  "version": "1.1.1",
   "author": "Verdnatura Levante SL",
   "description": "MySQL Version Control",
   "license": "GPL-3.0",
@@ -13,7 +13,6 @@
     "@sqltools/formatter": "^1.2.2",
     "colors": "^1.4.0",
     "ejs": "^3.1.5",
-    "fancy-log": "^1.3.3",
     "fs-extra": "^8.1.0",
     "getopts": "^2.2.5",
     "ini": "^1.3.5",

View File

@@ -34,6 +34,7 @@ COPY \
     structure.sql \
     index.js \
     myvc.js \
+    myvc-push.js \
     myvc.default.yml \
     db.ini \
     ./

View File

@@ -23,7 +23,7 @@ COPY \
 ARG STAMP=unknown
 RUN gosu mysql docker-temp-start.sh \
-    && myvc push \
+    && myvc push --socket \
     && docker-dump.sh dump/fixtures \
     && gosu mysql docker-temp-stop.sh

View File

@@ -1,22 +1,11 @@
-const log = require('fancy-log');
-const path = require('path');
-const docker = require('../docker');
+const mysql = require('mysql2/promise');
 module.exports = class Server {
-    constructor(name, context) {
+    constructor(ct, dbConfig) {
         Object.assign(this, {
-            id: name,
-            name,
-            isRandom: name == null,
-            dbConf: {
-                host: 'localhost',
-                port: '3306',
-                username: 'root',
-                password: 'root'
-            },
-            imageTag: name || 'myvc/dump',
-            context
+            ct,
+            dbConfig
         });
     }
@@ -28,22 +17,23 @@ module.exports = class Server {
         let elapsedTime = 0;
         let maxInterval = 4 * 60 * 1000;
+        const dbConfig = this.dbConfig;
         let myConf = {
-            user: this.dbConf.username,
-            password: this.dbConf.password,
-            host: this.dbConf.host,
-            port: this.dbConf.port
+            user: dbConfig.user,
+            password: dbConfig.password,
+            host: dbConfig.host,
+            port: dbConfig.port
         };
-        log('Waiting for MySQL init process...');
+        console.log('Waiting for MySQL init process...');
         async function checker() {
             elapsedTime += interval;
             let status;
             try {
-                status = await docker.inspect(this.id, {
-                    filter: '{{json .State.Status}}'
+                status = await this.ct.inspect({
+                    format: '{{json .State.Status}}'
                 });
             } catch (err) {
                 return reject(new Error(err.message));
@@ -52,12 +42,12 @@ module.exports = class Server {
             if (status === 'exited')
                 return reject(new Error('Docker exited, please see the docker logs for more info'));
-            let conn = mysql.createConnection(myConf);
+            const conn = mysql.createConnection(myConf);
             conn.on('error', () => {});
             conn.connect(err => {
                 conn.destroy();
                 if (!err) {
-                    log('MySQL process ready.');
+                    console.log('MySQL process ready.');
                     return resolve();
                 }
@@ -67,15 +57,15 @@ module.exports = class Server {
                 setTimeout(bindedChecker, interval);
             });
         }
-        let bindedChecker = checker.bind(this);
+        const bindedChecker = checker.bind(this);
         bindedChecker();
         });
     }
     async rm() {
         try {
-            await docker.stop(this.id);
-            await docker.rm(this.id, {volumes: true});
+            await this.ct.stop();
+            await this.ct.rm({volumes: true});
         } catch (e) {}
     }
 };
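A condensed view of how Run now consumes this class, taken from the diff above; only the surrounding variable names are assumed to be in scope:

    const ct = await docker.run(opts.code, null, runOptions);
    const server = new Server(ct, dbConfig);
    await server.wait();   // polls the container until MySQL accepts connections
    return server;         // callers can later stop it with server.rm()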