Update shasum when push
This commit is contained in:
parent
46b65eafbe
commit
0c21859a52
|
@ -209,9 +209,7 @@ start a small project.
|
|||
|
||||
## Todo
|
||||
|
||||
* Don't push (modified) routines whose SQL text is the same in DB.
|
||||
* Preserve all characteristics on pull: comments, SQL mode, READS SQL DATA...
|
||||
* Update routines shasum when push.
|
||||
* Undo changes when there is an error applying a version using "undo" files.
|
||||
* Use a custom *Dockerfile* for local database container.
|
||||
* Console logging via events.
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
|
||||
// Column attributes used by the exporter to filter events in
// information_schema.EVENTS.
const schemaCol = 'EVENT_SCHEMA';
const nameCol = 'EVENT_NAME';

module.exports = {schemaCol, nameCol};
|
|
@ -1,5 +1,6 @@
|
|||
|
||||
SELECT
|
||||
`EVENT_SCHEMA` AS `schema`,
|
||||
`EVENT_NAME` AS `name`,
|
||||
`DEFINER` AS `definer`,
|
||||
`EVENT_DEFINITION` AS `body`,
|
||||
|
@ -14,5 +15,5 @@ SELECT
|
|||
`EVENT_COMMENT` AS `comment`,
|
||||
`LAST_ALTERED` AS `modified`
|
||||
FROM `information_schema`.`EVENTS`
|
||||
WHERE `EVENT_SCHEMA` = ?
|
||||
WHERE ?
|
||||
ORDER BY `name`
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
|
||||
// Column attributes used by the exporter to filter functions in
// mysql.proc.
const schemaCol = 'db';
const nameCol = 'name';

module.exports = {schemaCol, nameCol};
|
|
@ -1,5 +1,6 @@
|
|||
|
||||
SELECT
|
||||
`db` AS `schema`,
|
||||
`name`,
|
||||
`definer`,
|
||||
`param_list` AS `paramList`,
|
||||
|
@ -8,5 +9,5 @@ SELECT
|
|||
`body`,
|
||||
`modified`
|
||||
FROM `mysql`.`proc`
|
||||
WHERE `db` = ? AND `type` = 'FUNCTION'
|
||||
WHERE ? AND `type` = 'FUNCTION'
|
||||
ORDER BY `name`
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
|
||||
// Column attributes used by the exporter to filter procedures in
// mysql.proc.
const schemaCol = 'db';
const nameCol = 'name';

module.exports = {schemaCol, nameCol};
|
|
@ -1,10 +1,11 @@
|
|||
|
||||
SELECT
|
||||
`db` AS `schema`,
|
||||
`name`,
|
||||
`definer`,
|
||||
`param_list` AS `paramList`,
|
||||
`body`,
|
||||
`modified`
|
||||
FROM `mysql`.`proc`
|
||||
WHERE `db` = ? AND `type` = 'PROCEDURE'
|
||||
WHERE ? AND `type` = 'PROCEDURE'
|
||||
ORDER BY `name`
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
|
||||
// Column attributes used by the exporter to filter triggers in
// information_schema.TRIGGERS.
const schemaCol = 'TRIGGER_SCHEMA';
const nameCol = 'TRIGGER_NAME';

module.exports = {schemaCol, nameCol};
|
|
@ -1,5 +1,6 @@
|
|||
|
||||
SELECT
|
||||
`TRIGGER_SCHEMA` AS `schema`,
|
||||
`TRIGGER_NAME` AS `name`,
|
||||
`DEFINER` AS `definer`,
|
||||
`ACTION_TIMING` AS `actionTiming`,
|
||||
|
@ -8,5 +9,5 @@ SELECT
|
|||
`ACTION_STATEMENT` AS `body`,
|
||||
`CREATED` AS `modified`
|
||||
FROM `information_schema`.`TRIGGERS`
|
||||
WHERE `TRIGGER_SCHEMA` = ?
|
||||
WHERE ?
|
||||
ORDER BY `name`
|
||||
|
|
|
@ -1,9 +1,13 @@
|
|||
|
||||
const sqlFormatter = require('@sqltools/formatter');
|
||||
|
||||
module.exports = function(params) {
|
||||
params.definition = sqlFormatter.format(params.definition, {
|
||||
indent: '\t',
|
||||
reservedWordCase: 'upper'
|
||||
});
|
||||
}
|
||||
module.exports = {
|
||||
schemaCol: 'TABLE_SCHEMA',
|
||||
nameCol: 'TABLE_NAME',
|
||||
formatter(params) {
|
||||
params.definition = sqlFormatter.format(params.definition, {
|
||||
indent: '\t',
|
||||
reservedWordCase: 'upper'
|
||||
});
|
||||
}
|
||||
};
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
|
||||
SELECT
|
||||
`TABLE_SCHEMA` AS `schema`,
|
||||
`TABLE_NAME` AS `name`,
|
||||
`VIEW_DEFINITION` AS `definition`,
|
||||
`CHECK_OPTION` AS `checkOption`,
|
||||
|
@ -7,5 +8,5 @@ SELECT
|
|||
`DEFINER` AS `definer`,
|
||||
`SECURITY_TYPE` AS `securityType`
|
||||
FROM `information_schema`.`VIEWS`
|
||||
WHERE `TABLE_SCHEMA` = ?
|
||||
WHERE ?
|
||||
ORDER BY `name`
|
||||
|
|
163
lib.js
163
lib.js
|
@ -1,6 +1,169 @@
|
|||
|
||||
const ejs = require('ejs');
|
||||
const shajs = require('sha.js');
|
||||
const fs = require('fs-extra');
|
||||
|
||||
/**
 * Converts a camelCase string to its dashed (kebab-case) form,
 * lowercasing each uppercase ASCII letter and prefixing it with a dash.
 *
 * @param {String} str String to convert
 * @returns {String} Converted string
 */
function camelToSnake(str) {
	let result = '';
	for (const char of str)
		result += char >= 'A' && char <= 'Z'
			? `-${char.toLowerCase()}`
			: char;
	return result;
}
|
||||
|
||||
/**
 * Exports all database objects of a single type (function, procedure,
 * view, trigger or event) of a schema into one SQL file per routine.
 */
class Exporter {
	/**
	 * @param {ExporterEngine} engine Engine that owns this exporter
	 * @param {String} objectType Object type: 'function', 'procedure'...
	 * @param {Connection} conn MySQL connection object
	 */
	constructor(engine, objectType, conn) {
		this.engine = engine;
		this.objectType = objectType;
		this.dstDir = `${objectType}s`;
		this.conn = conn;
	}

	/**
	 * Loads the per-type resources: the information_schema query, the
	 * EJS template used to render each routine and its column attributes.
	 */
	async init() {
		const templateDir = `${__dirname}/exporters/${this.objectType}`;
		this.sql = await fs.readFile(`${templateDir}.sql`, 'utf8');

		const templateFile = await fs.readFile(`${templateDir}.ejs`, 'utf8');
		this.template = ejs.compile(templateFile);
		this.attrs = require(`${templateDir}.js`);
	}

	/**
	 * Exports the objects of a schema: writes only routines whose SHA
	 * sum changed and deletes files of routines no longer in the DB.
	 *
	 * @param {String} exportDir Base export directory
	 * @param {String} schema Schema name
	 * @param {Object} newSums Map filled with the newly computed SHA sums
	 * @param {Object} oldSums Map with the previously saved SHA sums
	 */
	async export(exportDir, schema, newSums, oldSums) {
		const res = await this.query(schema);
		if (!res.length) return;

		const routineDir = `${exportDir}/${schema}/${this.dstDir}`;
		if (!await fs.pathExists(routineDir))
			await fs.mkdir(routineDir);

		// Names currently present in the database
		const routineSet = new Set();
		for (const params of res)
			routineSet.add(params.name);

		// Remove files whose routine was dropped from the database
		const routines = await fs.readdir(routineDir);
		for (const routineFile of routines) {
			const match = routineFile.match(/^(.*)\.sql$/);
			if (!match) continue;
			const routine = match[1];
			if (!routineSet.has(routine))
				await fs.remove(`${routineDir}/${routine}.sql`);
		}

		for (const params of res) {
			const routineName = params.name;
			const sql = this.format(params);
			const routineFile = `${routineDir}/${routineName}.sql`;

			const shaSum = this.engine.shaSum(sql);
			newSums[routineName] = shaSum;

			// Rewrite the file only when its contents changed
			if (oldSums[routineName] !== shaSum)
				await fs.writeFile(routineFile, sql);
		}
	}

	/**
	 * Fetches the objects of this type, optionally filtered by schema
	 * and/or name.
	 *
	 * @param {String} schema Schema name, optional
	 * @param {String} name Object name, optional
	 * @returns {Array<Object>} Resultset with the object attributes
	 */
	async query(schema, name) {
		const {conn} = this;

		const ops = [];
		function addOp(col, value) {
			ops.push(conn.format('?? = ?', [col, value]));
		}
		if (schema)
			addOp(this.attrs.schemaCol, schema);
		if (name)
			addOp(this.attrs.nameCol, name);

		// Raw SQL placeholder, expanded by the driver via toSqlString()
		const filter = {
			toSqlString() {
				// Fall back to TRUE so the WHERE clause stays valid
				// when no filter was given
				return ops.length ? ops.join(' AND ') : 'TRUE';
			}
		};

		const [res] = await conn.query(this.sql, [filter]);
		return res;
	}

	/**
	 * Renders the CREATE statement of an object from its attributes,
	 * escaping the schema, name and definer identifiers.
	 *
	 * @param {Object} params Object attributes, as returned by query()
	 * @returns {String} Rendered SQL
	 */
	format(params) {
		const {conn} = this;

		if (this.attrs.formatter)
			this.attrs.formatter(params);

		const split = params.definer.split('@');
		params.schema = conn.escapeId(params.schema, true);
		params.name = conn.escapeId(params.name, true);
		params.definer =
			`${conn.escapeId(split[0], true)}@${conn.escapeId(split[1], true)}`;

		return this.template(params);
	}
}
|
||||
/**
 * Holds one Exporter per object type and the SHA sums of exported
 * routines, persisted to a JSON file so unchanged routines can be
 * skipped on later exports.
 */
class ExporterEngine {
	/**
	 * @param {Connection} conn MySQL connection object
	 * @param {String} myvcDir Directory where .shasums.json lives
	 */
	constructor(conn, myvcDir) {
		this.conn = conn;
		this.shaFile = `${myvcDir}/.shasums.json`;
		this.exporters = [];
		this.exporterMap = {};
	}

	/** Loads the saved SHA sums and initializes one exporter per type. */
	async init() {
		if (await fs.pathExists(this.shaFile))
			this.shaSums = JSON.parse(await fs.readFile(this.shaFile, 'utf8'));
		else
			this.shaSums = {};

		const types = [
			'function',
			'procedure',
			'view',
			'trigger',
			'event'
		];

		for (const type of types) {
			const exporter = new Exporter(this, type, this.conn);
			await exporter.init();

			this.exporters.push(exporter);
			this.exporterMap[type] = exporter;
		}
	}

	/**
	 * Fetches one routine from the database and renders its SQL.
	 *
	 * @param {String} type Object type
	 * @param {String} schema Schema name
	 * @param {String} name Object name
	 * @returns {String} Rendered SQL, undefined when it does not exist
	 */
	async fetchRoutine(type, schema, name) {
		const exporter = this.exporterMap[type];
		const [row] = await exporter.query(schema, name);
		return row && exporter.format(row);
	}

	/**
	 * Computes and stores the SHA sum of a routine fetched from the
	 * database. Does nothing when the routine does not exist, instead
	 * of digesting an undefined value.
	 */
	async fetchShaSum(type, schema, name) {
		const sql = await this.fetchRoutine(type, schema, name);
		if (sql !== undefined)
			this.setShaSum(type, schema, name, this.shaSum(sql));
	}

	/**
	 * @param {String} sql SQL text to digest
	 * @returns {String} Hex-encoded SHA-256 digest
	 */
	shaSum(sql) {
		return shajs('sha256')
			.update(JSON.stringify(sql))
			.digest('hex');
	}

	/** Stores a SHA sum, creating the nested maps as needed. */
	setShaSum(type, schema, name, shaSum) {
		const shaSums = this.shaSums;
		if (!shaSums[schema])
			shaSums[schema] = {};
		if (!shaSums[schema][type])
			shaSums[schema][type] = {};
		shaSums[schema][type][name] = shaSum;
	}

	/** Removes a stored SHA sum, ignoring missing schema/type entries. */
	deleteShaSum(type, schema, name) {
		// Explicit existence checks instead of a swallowed exception
		const bySchema = this.shaSums[schema];
		if (bySchema && bySchema[type])
			delete bySchema[type][name];
	}

	/** Persists the SHA sums to disk, pretty-printed. */
	async saveShaSums() {
		await fs.writeFile(this.shaFile,
			JSON.stringify(this.shaSums, null, ' '));
	}
}
|
||||
|
||||
// Public API of this module
Object.assign(module.exports, {
	camelToSnake,
	Exporter,
	ExporterEngine
});
|
||||
|
|
117
myvc-pull.js
117
myvc-pull.js
|
@ -1,10 +1,8 @@
|
|||
|
||||
const MyVC = require('./myvc');
|
||||
const fs = require('fs-extra');
|
||||
const ejs = require('ejs');
|
||||
const shajs = require('sha.js');
|
||||
const nodegit = require('nodegit');
|
||||
|
||||
const ExporterEngine = require('./lib').ExporterEngine;
|
||||
class Pull {
|
||||
get usage() {
|
||||
return {
|
||||
|
@ -79,30 +77,14 @@ class Pull {
|
|||
|
||||
console.log(`Incorporating routine changes.`);
|
||||
|
||||
const exporters = [
|
||||
new Exporter('function'),
|
||||
new Exporter('procedure'),
|
||||
new Exporter('view'),
|
||||
new Exporter('trigger'),
|
||||
new Exporter('event')
|
||||
];
|
||||
|
||||
for (const exporter of exporters)
|
||||
await exporter.init();
|
||||
const engine = new ExporterEngine(conn, opts.myvcDir);
|
||||
await engine.init();
|
||||
const shaSums = engine.shaSums;
|
||||
|
||||
const exportDir = `${opts.myvcDir}/routines`;
|
||||
if (!await fs.pathExists(exportDir))
|
||||
await fs.mkdir(exportDir);
|
||||
|
||||
// Initialize SHA data
|
||||
|
||||
let newShaSums = {};
|
||||
let oldShaSums;
|
||||
const shaFile = `${opts.myvcDir}/.shasums.json`;
|
||||
|
||||
if (await fs.pathExists(shaFile))
|
||||
oldShaSums = JSON.parse(await fs.readFile(shaFile, 'utf8'));
|
||||
|
||||
// Delete old schemas
|
||||
|
||||
const schemas = await fs.readdir(exportDir);
|
||||
|
@ -111,93 +93,30 @@ class Pull {
|
|||
await fs.remove(`${exportDir}/${schema}`, {recursive: true});
|
||||
}
|
||||
|
||||
for (const schema in shaSums) {
|
||||
if (!await fs.pathExists(`${exportDir}/${schema}`))
|
||||
delete shaSums[schema];
|
||||
}
|
||||
|
||||
// Export objects to SQL files
|
||||
|
||||
for (const schema of opts.schemas) {
|
||||
newShaSums[schema] = {};
|
||||
|
||||
let schemaDir = `${exportDir}/${schema}`;
|
||||
if (!await fs.pathExists(schemaDir))
|
||||
await fs.mkdir(schemaDir);
|
||||
if (!shaSums[schema])
|
||||
shaSums[schema] = {};
|
||||
const sums = shaSums[schema];
|
||||
|
||||
for (const exporter of exporters) {
|
||||
const objectType = exporter.objectType;
|
||||
const newSums = newShaSums[schema][objectType] = {};
|
||||
let oldSums = {};
|
||||
try {
|
||||
oldSums = oldShaSums[schema][objectType];
|
||||
} catch (e) {}
|
||||
|
||||
await exporter.export(conn, exportDir, schema, newSums, oldSums);
|
||||
for (const exporter of engine.exporters) {
|
||||
const type = exporter.objectType;
|
||||
const oldSums = sums[type] || {};
|
||||
sums[type] = {};
|
||||
await exporter.export(exportDir, schema, sums[type], oldSums);
|
||||
}
|
||||
}
|
||||
|
||||
// Save SHA data
|
||||
|
||||
await fs.writeFile(shaFile, JSON.stringify(newShaSums, null, ' '));
|
||||
}
|
||||
}
|
||||
|
||||
class Exporter {
|
||||
constructor(objectType) {
|
||||
this.objectType = objectType;
|
||||
this.dstDir = `${objectType}s`;
|
||||
}
|
||||
|
||||
async init() {
|
||||
const templateDir = `${__dirname}/exporters/${this.objectType}`;
|
||||
this.query = await fs.readFile(`${templateDir}.sql`, 'utf8');
|
||||
|
||||
const templateFile = await fs.readFile(`${templateDir}.ejs`, 'utf8');
|
||||
this.template = ejs.compile(templateFile);
|
||||
|
||||
if (await fs.pathExists(`${templateDir}.js`))
|
||||
this.formatter = require(`${templateDir}.js`);
|
||||
}
|
||||
|
||||
async export(conn, exportDir, schema, newSums, oldSums) {
|
||||
const [res] = await conn.query(this.query, [schema]);
|
||||
if (!res.length) return;
|
||||
|
||||
const routineDir = `${exportDir}/${schema}/${this.dstDir}`;
|
||||
if (!await fs.pathExists(routineDir))
|
||||
await fs.mkdir(routineDir);
|
||||
|
||||
const routineSet = new Set();
|
||||
for (const params of res)
|
||||
routineSet.add(params.name);
|
||||
|
||||
const routines = await fs.readdir(routineDir);
|
||||
for (const routineFile of routines) {
|
||||
const match = routineFile.match(/^(.*)\.sql$/);
|
||||
if (!match) continue;
|
||||
const routine = match[1];
|
||||
if (!routineSet.has(routine))
|
||||
await fs.remove(`${routineDir}/${routine}.sql`);
|
||||
}
|
||||
|
||||
for (const params of res) {
|
||||
if (this.formatter)
|
||||
this.formatter(params, schema)
|
||||
|
||||
const routineName = params.name;
|
||||
const split = params.definer.split('@');
|
||||
params.schema = conn.escapeId(schema);
|
||||
params.name = conn.escapeId(routineName, true);
|
||||
params.definer =
|
||||
`${conn.escapeId(split[0], true)}@${conn.escapeId(split[1], true)}`;
|
||||
|
||||
const sql = this.template(params);
|
||||
const routineFile = `${routineDir}/${routineName}.sql`;
|
||||
|
||||
const shaSum = shajs('sha256')
|
||||
.update(JSON.stringify(sql))
|
||||
.digest('hex');
|
||||
newSums[routineName] = shaSum;
|
||||
|
||||
if (oldSums[routineName] !== shaSum)
|
||||
await fs.writeFile(routineFile, sql);
|
||||
}
|
||||
await engine.saveShaSums();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
94
myvc-push.js
94
myvc-push.js
|
@ -2,6 +2,7 @@
|
|||
const MyVC = require('./myvc');
|
||||
const fs = require('fs-extra');
|
||||
const nodegit = require('nodegit');
|
||||
const ExporterEngine = require('./lib').ExporterEngine;
|
||||
|
||||
/**
|
||||
* Pushes changes to remote.
|
||||
|
@ -46,6 +47,8 @@ class Push {
|
|||
throw new Error(`Cannot obtain exclusive lock, used by connection ${isUsed}`);
|
||||
}
|
||||
|
||||
const pushConn = await myvc.createConnection();
|
||||
|
||||
// Get database version
|
||||
|
||||
const version = await myvc.fetchDbVersion() || {};
|
||||
|
@ -95,7 +98,6 @@ class Push {
|
|||
// Apply versions
|
||||
|
||||
console.log('Applying versions.');
|
||||
const pushConn = await myvc.createConnection();
|
||||
|
||||
let nChanges = 0;
|
||||
const versionsDir = `${opts.myvcDir}/versions`;
|
||||
|
@ -163,7 +165,8 @@ class Push {
|
|||
|
||||
let err;
|
||||
try {
|
||||
await this.queryFromFile(pushConn, `${scriptsDir}/${script}`);
|
||||
await this.queryFromFile(pushConn,
|
||||
`${scriptsDir}/${script}`);
|
||||
} catch (e) {
|
||||
err = e;
|
||||
}
|
||||
|
@ -231,40 +234,67 @@ class Push {
|
|||
);
|
||||
}
|
||||
|
||||
const engine = new ExporterEngine(conn, opts.myvcDir);
|
||||
await engine.init();
|
||||
|
||||
for (const change of changes) {
|
||||
const schema = change.schema;
|
||||
const name = change.name;
|
||||
const type = change.type.name.toLowerCase();
|
||||
const fullPath = `${opts.myvcDir}/routines/${change.path}.sql`;
|
||||
const exists = await fs.pathExists(fullPath);
|
||||
|
||||
const actionMsg = exists ? '[+]'.green : '[-]'.red;
|
||||
let newSql;
|
||||
if (exists)
|
||||
newSql = await fs.readFile(fullPath, 'utf8');
|
||||
const oldSql = await engine.fetchRoutine(type, schema, name);
|
||||
const isEqual = newSql == oldSql;
|
||||
|
||||
let actionMsg;
|
||||
if ((exists && isEqual) || (!exists && !oldSql))
|
||||
actionMsg = '[I]'.blue;
|
||||
else if (exists)
|
||||
actionMsg = '[+]'.green;
|
||||
else
|
||||
actionMsg = '[-]'.red;
|
||||
|
||||
const typeMsg = `[${change.type.abbr}]`[change.type.color];
|
||||
console.log('', actionMsg.bold, typeMsg.bold, change.fullName);
|
||||
|
||||
try {
|
||||
const scapedSchema = pushConn.escapeId(change.schema, true);
|
||||
const scapedSchema = pushConn.escapeId(schema, true);
|
||||
|
||||
if (exists) {
|
||||
if (change.type.name === 'VIEW')
|
||||
await pushConn.query(`USE ${scapedSchema}`);
|
||||
if (!isEqual) {
|
||||
if (change.type.name === 'VIEW')
|
||||
await pushConn.query(`USE ${scapedSchema}`);
|
||||
|
||||
await this.queryFromFile(pushConn, `routines/${change.path}.sql`);
|
||||
await this.multiQuery(pushConn, newSql);
|
||||
nRoutines++;
|
||||
|
||||
if (change.isRoutine) {
|
||||
await conn.query(
|
||||
`INSERT IGNORE INTO mysql.procs_priv
|
||||
SELECT * FROM tProcsPriv
|
||||
WHERE Db = ?
|
||||
AND Routine_name = ?
|
||||
AND Routine_type = ?`,
|
||||
[change.schema, change.name, change.type.name]
|
||||
);
|
||||
if (change.isRoutine) {
|
||||
await conn.query(
|
||||
`INSERT IGNORE INTO mysql.procs_priv
|
||||
SELECT * FROM tProcsPriv
|
||||
WHERE Db = ?
|
||||
AND Routine_name = ?
|
||||
AND Routine_type = ?`,
|
||||
[schema, name, change.type.name]
|
||||
);
|
||||
}
|
||||
|
||||
await engine.fetchShaSum(type, schema, name);
|
||||
}
|
||||
} else {
|
||||
const escapedName =
|
||||
scapedSchema + '.' +
|
||||
pushConn.escapeId(change.name, true);
|
||||
pushConn.escapeId(name, true);
|
||||
|
||||
const query = `DROP ${change.type.name} IF EXISTS ${escapedName}`;
|
||||
await pushConn.query(query);
|
||||
|
||||
engine.deleteShaSum(type, schema, name);
|
||||
nRoutines++;
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.sqlState)
|
||||
|
@ -273,18 +303,15 @@ class Push {
|
|||
throw err;
|
||||
}
|
||||
|
||||
nRoutines++;
|
||||
}
|
||||
|
||||
await engine.saveShaSums();
|
||||
|
||||
if (routines.length) {
|
||||
await conn.query(`DROP TEMPORARY TABLE tProcsPriv`);
|
||||
await conn.query('FLUSH PRIVILEGES');
|
||||
}
|
||||
|
||||
// Update and release
|
||||
|
||||
await pushConn.end();
|
||||
|
||||
if (nRoutines > 0) {
|
||||
console.log(` -> ${nRoutines} routines have changed.`);
|
||||
} else
|
||||
|
@ -296,6 +323,9 @@ class Push {
|
|||
if (version.gitCommit !== head.sha())
|
||||
await this.updateVersion(nRoutines, 'gitCommit', head.sha());
|
||||
|
||||
// Update and release
|
||||
|
||||
await pushConn.end();
|
||||
await conn.query(`DO RELEASE_LOCK('myvc_push')`);
|
||||
}
|
||||
|
||||
|
@ -328,14 +358,15 @@ class Push {
|
|||
}
|
||||
|
||||
/**
|
||||
* Executes an SQL script.
|
||||
* Executes a multi-query string.
|
||||
*
|
||||
* @param {String} file Path to the SQL script
|
||||
* @param {Connection} conn MySQL connection object
|
||||
* @param {String} sql SQL multi-query string
|
||||
* @returns {Array<Result>} The resultset
|
||||
*/
|
||||
async queryFromFile(conn, file) {
|
||||
async multiQuery(conn, sql) {
|
||||
let results = [];
|
||||
const stmts = this.querySplit(await fs.readFile(file, 'utf8'));
|
||||
const stmts = this.querySplit(sql);
|
||||
|
||||
for (const stmt of stmts)
|
||||
results = results.concat(await conn.query(stmt));
|
||||
|
@ -343,6 +374,17 @@ class Push {
|
|||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Executes an SQL script.
|
||||
*
|
||||
* @param {Connection} conn MySQL connection object
|
||||
* @returns {Array<Result>} The resultset
|
||||
*/
|
||||
async queryFromFile(conn, file) {
|
||||
const sql = await fs.readFile(file, 'utf8');
|
||||
return await this.multiQuery(conn, sql);
|
||||
}
|
||||
|
||||
/**
|
||||
* Splits an SQL muti-query into a single-query array, it does an small
|
||||
* parse to correctly handle the DELIMITER statement.
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "myvc",
|
||||
"version": "1.2.4",
|
||||
"version": "1.2.5",
|
||||
"author": "Verdnatura Levante SL",
|
||||
"description": "MySQL Version Control",
|
||||
"license": "GPL-3.0",
|
||||
|
|
Loading…
Reference in New Issue