Update shasum when push

parent 46b65eafbe
commit 0c21859a52
README.md
@@ -209,9 +209,7 @@ start a small project.
 ## Todo

-* Don't push (modified) routines whose SQL text is the same in DB.
 * Preserve all characteristics on pull: comments, SQL mode, READS SQL DATA...
-* Update routines shasum when push.
 * Undo changes when there is an error applying a version using "undo" files.
 * Use a custom *Dockerfile* for local database container.
 * Console logging via events.
exporters/event.js
@@ -0,0 +1,5 @@
+module.exports = {
+    schemaCol: 'EVENT_SCHEMA',
+    nameCol: 'EVENT_NAME'
+};
exporters/event.sql
@@ -1,5 +1,6 @@
 SELECT
+    `EVENT_SCHEMA` AS `schema`,
     `EVENT_NAME` AS `name`,
     `DEFINER` AS `definer`,
     `EVENT_DEFINITION` AS `body`,
@@ -14,5 +15,5 @@ SELECT
     `EVENT_COMMENT` AS `comment`,
     `LAST_ALTERED` AS `modified`
 FROM `information_schema`.`EVENTS`
-WHERE `EVENT_SCHEMA` = ?
+WHERE ?
 ORDER BY `name`
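The new WHERE ? placeholder in every exporter query is what allows the same
statement to filter by schema alone or by schema and name: Exporter.query() in
lib.js binds it to an object whose toSqlString() returns the assembled
conditions, built from the schemaCol/nameCol descriptors above. A minimal
sketch of that mechanism, assuming the mysql2 driver (the schema and event
names are made up for illustration):

    // Objects with a toSqlString() method are inlined verbatim by the
    // mysql/mysql2 escaping rules, so one `WHERE ?` takes a variable filter.
    const mysql = require('mysql2/promise');

    async function listEvents(schema, name) {
        const conn = await mysql.createConnection({user: 'root'});

        const ops = [];
        if (schema) ops.push(conn.format('?? = ?', ['EVENT_SCHEMA', schema]));
        if (name) ops.push(conn.format('?? = ?', ['EVENT_NAME', name]));

        // Expands to: WHERE `EVENT_SCHEMA` = 'myDb' AND `EVENT_NAME` = 'myEvent'
        // (at least one condition is assumed, as in the real callers)
        const filter = {toSqlString: () => ops.join(' AND ')};
        const [rows] = await conn.query(
            'SELECT `EVENT_NAME` FROM `information_schema`.`EVENTS` WHERE ?',
            [filter]);

        await conn.end();
        return rows;
    }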
exporters/function.js
@@ -0,0 +1,5 @@
+module.exports = {
+    schemaCol: 'db',
+    nameCol: 'name'
+};
exporters/function.sql
@@ -1,5 +1,6 @@
 SELECT
+    `db` AS `schema`,
     `name`,
     `definer`,
     `param_list` AS `paramList`,
@@ -8,5 +9,5 @@ SELECT
     `body`,
     `modified`
 FROM `mysql`.`proc`
-WHERE `db` = ? AND `type` = 'FUNCTION'
+WHERE ? AND `type` = 'FUNCTION'
 ORDER BY `name`
exporters/procedure.js
@@ -0,0 +1,5 @@
+module.exports = {
+    schemaCol: 'db',
+    nameCol: 'name'
+};
exporters/procedure.sql
@@ -1,10 +1,11 @@
 SELECT
+    `db` AS `schema`,
     `name`,
     `definer`,
     `param_list` AS `paramList`,
     `body`,
     `modified`
 FROM `mysql`.`proc`
-WHERE `db` = ? AND `type` = 'PROCEDURE'
+WHERE ? AND `type` = 'PROCEDURE'
 ORDER BY `name`
exporters/trigger.js
@@ -0,0 +1,5 @@
+module.exports = {
+    schemaCol: 'TRIGGER_SCHEMA',
+    nameCol: 'TRIGGER_NAME'
+};
exporters/trigger.sql
@@ -1,5 +1,6 @@
 SELECT
+    `TRIGGER_SCHEMA` AS `schema`,
     `TRIGGER_NAME` AS `name`,
     `DEFINER` AS `definer`,
     `ACTION_TIMING` AS `actionTiming`,
@@ -8,5 +9,5 @@ SELECT
     `ACTION_STATEMENT` AS `body`,
     `CREATED` AS `modified`
 FROM `information_schema`.`TRIGGERS`
-WHERE `TRIGGER_SCHEMA` = ?
+WHERE ?
 ORDER BY `name`
exporters/view.js
@@ -1,9 +1,13 @@
 const sqlFormatter = require('@sqltools/formatter');

-module.exports = function(params) {
+module.exports = {
+    schemaCol: 'TABLE_SCHEMA',
+    nameCol: 'TABLE_NAME',
+    formatter(params) {
         params.definition = sqlFormatter.format(params.definition, {
             indent: '\t',
             reservedWordCase: 'upper'
         });
     }
+};
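The view descriptor keeps its former pretty-printing behaviour, now exposed as
a formatter hook alongside the column metadata. Roughly what the hook does to
a pulled VIEW_DEFINITION (the input string is invented and the exact line
wrapping of the output is illustrative):

    const sqlFormatter = require('@sqltools/formatter');

    const definition = 'select `id`, `name` from `user` where `active` = 1';
    console.log(sqlFormatter.format(definition, {
        indent: '\t',
        reservedWordCase: 'upper'
    }));
    // Prints something like:
    // SELECT `id`, `name`
    // FROM `user`
    // WHERE `active` = 1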
exporters/view.sql
@@ -1,5 +1,6 @@
 SELECT
+    `TABLE_SCHEMA` AS `schema`,
     `TABLE_NAME` AS `name`,
     `VIEW_DEFINITION` AS `definition`,
     `CHECK_OPTION` AS `checkOption`,
@@ -7,5 +8,5 @@ SELECT
     `DEFINER` AS `definer`,
     `SECURITY_TYPE` AS `securityType`
 FROM `information_schema`.`VIEWS`
-WHERE `TABLE_SCHEMA` = ?
+WHERE ?
 ORDER BY `name`
lib.js (163 lines changed)
@@ -1,6 +1,169 @@
+const ejs = require('ejs');
+const shajs = require('sha.js');
+const fs = require('fs-extra');
+
 function camelToSnake(str) {
     return str.replace(/[A-Z]/g, match => `-${match.toLowerCase()}`);
 }

+class Exporter {
+    constructor(engine, objectType, conn) {
+        this.engine = engine;
+        this.objectType = objectType;
+        this.dstDir = `${objectType}s`;
+        this.conn = conn;
+    }
+
+    async init() {
+        const templateDir = `${__dirname}/exporters/${this.objectType}`;
+        this.sql = await fs.readFile(`${templateDir}.sql`, 'utf8');
+
+        const templateFile = await fs.readFile(`${templateDir}.ejs`, 'utf8');
+        this.template = ejs.compile(templateFile);
+        this.attrs = require(`${templateDir}.js`);
+    }
+
+    async export(exportDir, schema, newSums, oldSums) {
+        const res = await this.query(schema);
+        if (!res.length) return;
+
+        const routineDir = `${exportDir}/${schema}/${this.dstDir}`;
+        if (!await fs.pathExists(routineDir))
+            await fs.mkdir(routineDir);
+
+        const routineSet = new Set();
+        for (const params of res)
+            routineSet.add(params.name);
+
+        const routines = await fs.readdir(routineDir);
+        for (const routineFile of routines) {
+            const match = routineFile.match(/^(.*)\.sql$/);
+            if (!match) continue;
+            const routine = match[1];
+            if (!routineSet.has(routine))
+                await fs.remove(`${routineDir}/${routine}.sql`);
+        }
+
+        for (const params of res) {
+            const routineName = params.name;
+            const sql = this.format(params);
+            const routineFile = `${routineDir}/${routineName}.sql`;
+
+            const shaSum = this.engine.shaSum(sql);
+            newSums[routineName] = shaSum;
+
+            if (oldSums[routineName] !== shaSum)
+                await fs.writeFile(routineFile, sql);
+        }
+    }
+
+    async query(schema, name) {
+        const {conn} = this;
+
+        const ops = [];
+        function addOp(col, value) {
+            ops.push(conn.format('?? = ?', [col, value]));
+        }
+        if (schema)
+            addOp(this.attrs.schemaCol, schema);
+        if (name)
+            addOp(this.attrs.nameCol, name);
+
+        const filter = {
+            toSqlString() {
+                return ops.join(' AND ');
+            }
+        }
+
+        const [res] = await conn.query(this.sql, [filter]);
+        return res;
+    }
+
+    format(params) {
+        const {conn} = this;
+
+        if (this.attrs.formatter)
+            this.attrs.formatter(params);
+
+        const split = params.definer.split('@');
+        params.schema = conn.escapeId(params.schema, true);
+        params.name = conn.escapeId(params.name, true);
+        params.definer =
+            `${conn.escapeId(split[0], true)}@${conn.escapeId(split[1], true)}`;
+
+        return this.template(params);
+    }
+}
+
+class ExporterEngine {
+    constructor(conn, myvcDir) {
+        this.conn = conn;
+        this.shaFile = `${myvcDir}/.shasums.json`;
+        this.exporters = [];
+        this.exporterMap = {};
+    }
+
+    async init() {
+        if (await fs.pathExists(this.shaFile))
+            this.shaSums = JSON.parse(await fs.readFile(this.shaFile, 'utf8'));
+        else
+            this.shaSums = {};
+
+        const types = [
+            'function',
+            'procedure',
+            'view',
+            'trigger',
+            'event'
+        ];
+
+        for (const type of types) {
+            const exporter = new Exporter(this, type, this.conn);
+            await exporter.init();
+
+            this.exporters.push(exporter);
+            this.exporterMap[type] = exporter;
+        }
+    }
+
+    async fetchRoutine(type, schema, name) {
+        const exporter = this.exporterMap[type];
+        const [row] = await exporter.query(schema, name);
+        return row && exporter.format(row);
+    }
+
+    async fetchShaSum(type, schema, name) {
+        const sql = await this.fetchRoutine(type, schema, name);
+        this.setShaSum(type, schema, name, this.shaSum(sql));
+    }
+
+    shaSum(sql) {
+        return shajs('sha256')
+            .update(JSON.stringify(sql))
+            .digest('hex');
+    }
+
+    setShaSum(type, schema, name, shaSum) {
+        const shaSums = this.shaSums;
+        if (!shaSums[schema])
+            shaSums[schema] = {};
+        if (!shaSums[schema][type])
+            shaSums[schema][type] = {};
+        shaSums[schema][type][name] = shaSum;
+    }
+
+    deleteShaSum(type, schema, name) {
+        try {
+            delete this.shaSums[schema][type][name];
+        } catch (e) {}
+    }
+
+    async saveShaSums() {
+        await fs.writeFile(this.shaFile,
+            JSON.stringify(this.shaSums, null, ' '));
+    }
+}
+
 module.exports.camelToSnake = camelToSnake;
+module.exports.Exporter = Exporter;
+module.exports.ExporterEngine = ExporterEngine;
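Taken together, ExporterEngine centralizes what myvc-pull.js previously did
inline: it loads .shasums.json, owns one Exporter per object type, and can
hash a single routine on demand. A hypothetical end-to-end use (the myvcDir
'.' and the connection options are assumptions, not part of this commit):

    const mysql = require('mysql2/promise');
    const {ExporterEngine} = require('./lib');

    async function refreshShaSum() {
        const conn = await mysql.createConnection({user: 'root'});
        const engine = new ExporterEngine(conn, '.');
        await engine.init(); // loads ./.shasums.json when it exists

        // Re-hash one routine straight from the DB and persist the result.
        await engine.fetchShaSum('procedure', 'myDb', 'myProc');
        await engine.saveShaSums();
        // .shasums.json is keyed schema -> type -> name, e.g.:
        // {"myDb": {"procedure": {"myProc": "9f86d081..."}}}

        await conn.end();
    }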
myvc-pull.js (117 lines changed)
@@ -1,10 +1,8 @@
 const MyVC = require('./myvc');
 const fs = require('fs-extra');
-const ejs = require('ejs');
-const shajs = require('sha.js');
 const nodegit = require('nodegit');
+const ExporterEngine = require('./lib').ExporterEngine;

 class Pull {
     get usage() {
         return {
@@ -79,30 +77,14 @@ class Pull {
             console.log(`Incorporating routine changes.`);

-            const exporters = [
-                new Exporter('function'),
-                new Exporter('procedure'),
-                new Exporter('view'),
-                new Exporter('trigger'),
-                new Exporter('event')
-            ];
-
-            for (const exporter of exporters)
-                await exporter.init();
+            const engine = new ExporterEngine(conn, opts.myvcDir);
+            await engine.init();
+            const shaSums = engine.shaSums;

             const exportDir = `${opts.myvcDir}/routines`;
             if (!await fs.pathExists(exportDir))
                 await fs.mkdir(exportDir);

-            // Initialize SHA data
-
-            let newShaSums = {};
-            let oldShaSums;
-            const shaFile = `${opts.myvcDir}/.shasums.json`;
-            if (await fs.pathExists(shaFile))
-                oldShaSums = JSON.parse(await fs.readFile(shaFile, 'utf8'));
-
             // Delete old schemas
             const schemas = await fs.readdir(exportDir);
@@ -111,93 +93,30 @@ class Pull {
                 await fs.remove(`${exportDir}/${schema}`, {recursive: true});
             }

+            for (const schema in shaSums) {
+                if (!await fs.pathExists(`${exportDir}/${schema}`))
+                    delete shaSums[schema];
+            }
+
             // Export objects to SQL files

             for (const schema of opts.schemas) {
-                newShaSums[schema] = {};
-
                 let schemaDir = `${exportDir}/${schema}`;
                 if (!await fs.pathExists(schemaDir))
                     await fs.mkdir(schemaDir);

-                for (const exporter of exporters) {
-                    const objectType = exporter.objectType;
-                    const newSums = newShaSums[schema][objectType] = {};
-                    let oldSums = {};
-                    try {
-                        oldSums = oldShaSums[schema][objectType];
-                    } catch (e) {}
-
-                    await exporter.export(conn, exportDir, schema, newSums, oldSums);
+                if (!shaSums[schema])
+                    shaSums[schema] = {};
+                const sums = shaSums[schema];
+
+                for (const exporter of engine.exporters) {
+                    const type = exporter.objectType;
+                    const oldSums = sums[type] || {};
+                    sums[type] = {};
+                    await exporter.export(exportDir, schema, sums[type], oldSums);
                 }
             }

-            // Save SHA data
-
-            await fs.writeFile(shaFile, JSON.stringify(newShaSums, null, ' '));
+            await engine.saveShaSums();
         }
-    }
-}
-
-class Exporter {
-    constructor(objectType) {
-        this.objectType = objectType;
-        this.dstDir = `${objectType}s`;
-    }
-
-    async init() {
-        const templateDir = `${__dirname}/exporters/${this.objectType}`;
-        this.query = await fs.readFile(`${templateDir}.sql`, 'utf8');
-
-        const templateFile = await fs.readFile(`${templateDir}.ejs`, 'utf8');
-        this.template = ejs.compile(templateFile);
-
-        if (await fs.pathExists(`${templateDir}.js`))
-            this.formatter = require(`${templateDir}.js`);
-    }
-
-    async export(conn, exportDir, schema, newSums, oldSums) {
-        const [res] = await conn.query(this.query, [schema]);
-        if (!res.length) return;
-
-        const routineDir = `${exportDir}/${schema}/${this.dstDir}`;
-        if (!await fs.pathExists(routineDir))
-            await fs.mkdir(routineDir);
-
-        const routineSet = new Set();
-        for (const params of res)
-            routineSet.add(params.name);
-
-        const routines = await fs.readdir(routineDir);
-        for (const routineFile of routines) {
-            const match = routineFile.match(/^(.*)\.sql$/);
-            if (!match) continue;
-            const routine = match[1];
-            if (!routineSet.has(routine))
-                await fs.remove(`${routineDir}/${routine}.sql`);
-        }
-
-        for (const params of res) {
-            if (this.formatter)
-                this.formatter(params, schema)
-
-            const routineName = params.name;
-            const split = params.definer.split('@');
-            params.schema = conn.escapeId(schema);
-            params.name = conn.escapeId(routineName, true);
-            params.definer =
-                `${conn.escapeId(split[0], true)}@${conn.escapeId(split[1], true)}`;
-
-            const sql = this.template(params);
-            const routineFile = `${routineDir}/${routineName}.sql`;
-
-            const shaSum = shajs('sha256')
-                .update(JSON.stringify(sql))
-                .digest('hex');
-            newSums[routineName] = shaSum;
-
-            if (oldSums[routineName] !== shaSum)
-                await fs.writeFile(routineFile, sql);
-        }
     }
 }
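The key move in the rewritten pull loop is keeping the previous checksums
aside (oldSums) while a fresh map (sums[type]) is rebuilt from the query
results: routines dropped from the database fall out of .shasums.json
automatically, and unchanged .sql files are never rewritten. A condensed
sketch of that pattern (the res rows and their rendered SQL are placeholders;
the real code renders SQL through the EJS templates):

    const fs = require('fs-extra');
    const shajs = require('sha.js');

    // newSums starts empty, so names absent from `res` silently drop out of
    // the persisted checksums; files are only written when the hash changed.
    async function exportRoutines(routineDir, res, oldSums, newSums) {
        for (const {name, sql} of res) {
            const sum = shajs('sha256')
                .update(JSON.stringify(sql))
                .digest('hex');
            newSums[name] = sum;
            if (oldSums[name] !== sum)
                await fs.writeFile(`${routineDir}/${name}.sql`, sql);
        }
    }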
myvc-push.js (74 lines changed)
@@ -2,6 +2,7 @@
 const MyVC = require('./myvc');
 const fs = require('fs-extra');
 const nodegit = require('nodegit');
+const ExporterEngine = require('./lib').ExporterEngine;

 /**
  * Pushes changes to remote.
@@ -46,6 +47,8 @@ class Push {
             throw new Error(`Cannot obtain exclusive lock, used by connection ${isUsed}`);
         }

+        const pushConn = await myvc.createConnection();
+
         // Get database version

         const version = await myvc.fetchDbVersion() || {};
@@ -95,7 +98,6 @@ class Push {
         // Apply versions

         console.log('Applying versions.');
-        const pushConn = await myvc.createConnection();

         let nChanges = 0;
         const versionsDir = `${opts.myvcDir}/versions`;
@@ -163,7 +165,8 @@ class Push {

             let err;
             try {
-                await this.queryFromFile(pushConn, `${scriptsDir}/${script}`);
+                await this.queryFromFile(pushConn,
+                    `${scriptsDir}/${script}`);
             } catch (e) {
                 err = e;
             }
@@ -231,22 +234,43 @@ class Push {
             );
         }

+        const engine = new ExporterEngine(conn, opts.myvcDir);
+        await engine.init();
+
         for (const change of changes) {
+            const schema = change.schema;
+            const name = change.name;
+            const type = change.type.name.toLowerCase();
             const fullPath = `${opts.myvcDir}/routines/${change.path}.sql`;
             const exists = await fs.pathExists(fullPath);

-            const actionMsg = exists ? '[+]'.green : '[-]'.red;
+            let newSql;
+            if (exists)
+                newSql = await fs.readFile(fullPath, 'utf8');
+            const oldSql = await engine.fetchRoutine(type, schema, name);
+            const isEqual = newSql == oldSql;
+
+            let actionMsg;
+            if ((exists && isEqual) || (!exists && !oldSql))
+                actionMsg = '[I]'.blue;
+            else if (exists)
+                actionMsg = '[+]'.green;
+            else
+                actionMsg = '[-]'.red;

             const typeMsg = `[${change.type.abbr}]`[change.type.color];
             console.log('', actionMsg.bold, typeMsg.bold, change.fullName);

             try {
-                const scapedSchema = pushConn.escapeId(change.schema, true);
+                const scapedSchema = pushConn.escapeId(schema, true);

                 if (exists) {
+                    if (!isEqual) {
                         if (change.type.name === 'VIEW')
                             await pushConn.query(`USE ${scapedSchema}`);

-                    await this.queryFromFile(pushConn, `routines/${change.path}.sql`);
+                        await this.multiQuery(pushConn, newSql);
+                        nRoutines++;

                         if (change.isRoutine) {
                             await conn.query(
@@ -255,16 +279,22 @@ class Push {
                                 WHERE Db = ?
                                     AND Routine_name = ?
                                     AND Routine_type = ?`,
-                                [change.schema, change.name, change.type.name]
+                                [schema, name, change.type.name]
                             );
                         }
+
+                        await engine.fetchShaSum(type, schema, name);
+                    }
                 } else {
                     const escapedName =
                         scapedSchema + '.' +
-                        pushConn.escapeId(change.name, true);
+                        pushConn.escapeId(name, true);

                     const query = `DROP ${change.type.name} IF EXISTS ${escapedName}`;
                     await pushConn.query(query);
+
+                    engine.deleteShaSum(type, schema, name);
+                    nRoutines++;
                 }
             } catch (err) {
                 if (err.sqlState)
@@ -273,18 +303,15 @@ class Push {
                 throw err;
             }
-
-            nRoutines++;
         }

+        await engine.saveShaSums();
+
         if (routines.length) {
             await conn.query(`DROP TEMPORARY TABLE tProcsPriv`);
             await conn.query('FLUSH PRIVILEGES');
         }

-        // Update and release
-        await pushConn.end();
-
         if (nRoutines > 0) {
             console.log(` -> ${nRoutines} routines have changed.`);
         } else
@@ -296,6 +323,9 @@ class Push {
         if (version.gitCommit !== head.sha())
             await this.updateVersion(nRoutines, 'gitCommit', head.sha());

+        // Update and release
+
+        await pushConn.end();
         await conn.query(`DO RELEASE_LOCK('myvc_push')`);
     }
@@ -328,14 +358,15 @@ class Push {
     }

     /**
-     * Executes an SQL script.
+     * Executes a multi-query string.
      *
-     * @param {String} file Path to the SQL script
+     * @param {Connection} conn MySQL connection object
+     * @param {String} sql SQL multi-query string
      * @returns {Array<Result>} The resultset
      */
-    async queryFromFile(conn, file) {
+    async multiQuery(conn, sql) {
         let results = [];
-        const stmts = this.querySplit(await fs.readFile(file, 'utf8'));
+        const stmts = this.querySplit(sql);

         for (const stmt of stmts)
             results = results.concat(await conn.query(stmt));
@@ -343,6 +374,17 @@ class Push {
         return results;
     }

+    /**
+     * Executes an SQL script.
+     *
+     * @param {Connection} conn MySQL connection object
+     * @returns {Array<Result>} The resultset
+     */
+    async queryFromFile(conn, file) {
+        const sql = await fs.readFile(file, 'utf8');
+        return await this.multiQuery(conn, sql);
+    }
+
     /**
      * Splits an SQL multi-query into a single-query array; it does a small
      * parse to correctly handle the DELIMITER statement.
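multiQuery() delegates the actual splitting to the pre-existing querySplit(),
which is referenced but not shown in this diff. A rough, simplified idea of
what DELIMITER-aware splitting involves; this is an assumption about its
behaviour, not the actual implementation, and it ignores string literals and
comments:

    // Naive DELIMITER-aware splitter: tracks the current terminator and cuts
    // statements on it. The real querySplit() does a more careful parse.
    function splitQueries(sql) {
        const queries = [];
        let delimiter = ';';
        let i = 0;

        while (i < sql.length) {
            // A DELIMITER line switches the terminator for what follows.
            const match = sql.slice(i).match(/^\s*DELIMITER\s+(\S+)[^\n]*(\n|$)/i);
            if (match) {
                delimiter = match[1];
                i += match[0].length;
                continue;
            }

            const end = sql.indexOf(delimiter, i);
            const stmt = (end === -1 ? sql.slice(i) : sql.slice(i, end)).trim();
            if (stmt) queries.push(stmt);
            if (end === -1) break;
            i = end + delimiter.length;
        }
        return queries;
    }

    // splitQueries("DELIMITER $$\nCREATE PROCEDURE p() BEGIN SELECT 1; END$$\nDELIMITER ;\nSELECT 2;")
    // -> ['CREATE PROCEDURE p() BEGIN SELECT 1; END', 'SELECT 2']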
package.json
@@ -1,6 +1,6 @@
 {
     "name": "myvc",
-    "version": "1.2.4",
+    "version": "1.2.5",
     "author": "Verdnatura Levante SL",
     "description": "MySQL Version Control",
     "license": "GPL-3.0",