Compare commits
24 Commits
6318a58f24
f05cc498cc
2b6a97c243
5e6353688f
9416617800
105227408c
0bf241600b
71e8329dfc
5bc8fb0839
d92be7533c
ea42017e4f
5ac41532d9
a38cda0ba3
a27afbaa53
912719cc08
dfa9570ea4
3324175483
823de4889f
7d68eaa3ac
dfa1db1432
c02eed742d
79a374d460
6eb451ecaf
26ac3e995a
@@ -1,2 +1,2 @@
 .DS_Store
-node_modules
\ No newline at end of file
+node_modules
@@ -6,6 +6,14 @@ mockFunctions:
   - mockTime
   - mockUtcTime
 sumViews: true
+defaultDefiner: root@localhost
+localRemotes:
+  - local
+  - docker
+deprecMarkRegex: __$
+deprecCommentRegex: ^@deprecated [0-9]{4}-[0-9]{2}-[0-9]{2}
+deprecDateRegex: '[0-9]{4}-[0-9]{2}-[0-9]{2}'
+deprecRetentionPeriod: 60 # Days
 privileges:
   userTable: global_priv
   userWhere: >-
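The four new deprec* settings work together: deprecMarkRegex matches object names marked as deprecated, deprecCommentRegex and deprecDateRegex locate the deprecation date inside the object's comment, and deprecRetentionPeriod sets how long a marked object survives before it is dropped. A minimal sketch of that matching logic, with a hypothetical column name and comment (the real check runs inside the new deprecate() function further down):

    // Hedged sketch: how the new deprec* defaults classify an object.
    // 'userFk__' and its comment are hypothetical examples.
    const opts = {
        deprecMarkRegex: '__$',
        deprecCommentRegex: '^@deprecated [0-9]{4}-[0-9]{2}-[0-9]{2}',
        deprecDateRegex: '[0-9]{4}-[0-9]{2}-[0-9]{2}',
        deprecRetentionPeriod: 60 // Days
    };
    const column = {name: 'userFk__', comment: '@deprecated 2023-01-15'};

    const isMarked = new RegExp(opts.deprecMarkRegex).test(column.name)
        && new RegExp(opts.deprecCommentRegex).test(column.comment);
    const minDeprecDate =
        new Date(Date.now() - opts.deprecRetentionPeriod * 24 * 60 * 60 * 1000);
    const isExpired =
        new Date(column.comment.match(opts.deprecDateRegex)[0]) < minDeprecDate;

    console.log(isMarked && isExpired); // true once the retention period has passed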
@@ -83,7 +83,7 @@ module.exports = class Dumper {
     }

     async runDump(command, args) {
-        const iniPath = path.join(this.opts.subdir || '', 'remotes', this.opts.iniFile);
+        const iniPath = path.join('remotes', this.opts.iniFile);
         const myArgs = [
             `--defaults-file=${iniPath}`
         ];
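The effect of this change is that the defaults file is now always resolved from the workspace's 'remotes' directory, no longer prefixed with this.opts.subdir. A quick illustration, with a hypothetical value standing in for opts.iniFile:

    const path = require('path');
    // Hypothetical values; 'production.ini' stands in for this.opts.iniFile.
    path.join('mySubdir', 'remotes', 'production.ini'); // old: 'mySubdir/remotes/production.ini'
    path.join('remotes', 'production.ini');             // new: 'remotes/production.ini'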
@@ -10,7 +10,7 @@ class Clean extends Command {
     static usage = {
         description: 'Cleans old applied versions',
         params: {
-            purge: 'Wether to remove non-existent scripts from DB log'
+            purge: 'Whether to remove non-existent scripts from DB log'
         }
     };

@@ -43,7 +43,7 @@ class Create extends Command {
         const params = {
             schema,
             name,
-            definer: 'root@localhost'
+            definer: opts.defaultDefiner
         };

         switch (opts.type) {
@@ -9,7 +9,7 @@ class Dump extends Command {
         description: 'Dumps structure and fixtures from remote',
         params: {
             lock: 'Whether to lock tables on dump',
-            triggers: 'Wether to include triggers into dump'
+            triggers: 'Whether to include triggers into dump'
         },
         operand: 'remote'
     };
@@ -15,9 +15,9 @@ class Push extends Command {
         description: 'Apply changes into database',
         params: {
             force: 'Answer yes to all questions',
-            commit: 'Wether to save the commit SHA into database',
+            commit: 'Whether to save the commit SHA into database',
             sums: 'Save SHA sums of pushed objects',
-            triggers: 'Wether to exclude triggers, used to generate local DB'
+            triggers: 'Whether to exclude triggers, used to generate local DB'
         },
         operand: 'remote'
     };
@@ -156,7 +156,17 @@ class Push extends Command {
             throw new Error(`Cannot obtain exclusive lock, used by connection ${isUsed}`);
         }

+        const [[scheduler]] = await conn.query(`SELECT @@event_scheduler state`);
+        if (scheduler.state === 'ON') await eventScheduler(false);
+
+        async function eventScheduler(isActive) {
+            await conn.query(
+                `SET GLOBAL event_scheduler = ${isActive ? 'ON' : 'OFF'}`
+            );
+        }
+
         async function releaseLock() {
+            if (scheduler.state === 'ON') await eventScheduler(true);
             await conn.query(`DO RELEASE_LOCK('myt_push')`);
         }

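Push now snapshots the event scheduler state before applying changes: if it was ON, it is switched OFF for the duration of the push and only switched back ON when the lock is released, presumably so scheduled events cannot fire against a half-migrated schema. The shape of the round trip, as a sketch (assuming conn is the same connection used in the async run() method above):

    // Sketch only; mirrors the eventScheduler()/releaseLock() pair above.
    const [[scheduler]] = await conn.query(`SELECT @@event_scheduler state`);
    if (scheduler.state === 'ON')
        await conn.query(`SET GLOBAL event_scheduler = OFF`);
    try {
        // ... apply versions and routines ...
    } finally {
        if (scheduler.state === 'ON')
            await conn.query(`SET GLOBAL event_scheduler = ON`);
        await conn.query(`DO RELEASE_LOCK('myt_push')`);
    }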
@@ -244,13 +254,15 @@ class Push extends Command {

             let apply = false;

-            if (!version)
+            if (!version) {
                 this.emit('version', version, versionDir, 'wrongDirectory');
-            else if (version.number.length != dbVersion.number.length)
+                continue;
+            } else if (version.number.length != dbVersion.number.length) {
                 this.emit('version', version, versionDir, 'badVersion');
-            else
-                apply = version.apply;
+                continue;
+            }

+            apply = version.apply;
             if (apply) showLog = true;
             if (showLog) this.emit('version', version, versionDir);
             if (!apply) continue;

@@ -359,9 +371,12 @@ class Push extends Command {
                 const oldSql = await engine.fetchRoutine(type, schema, name);
                 const oldSum = engine.getShaSum(type, schema, name);

+                const localRemote = opts.remote == null
+                    || opts.localRemotes?.indexOf(opts.remote) !== -1;
+
                 const isMockFn = type == 'function'
                     && schema == opts.versionSchema
-                    && opts.remote == 'local'
+                    && localRemote
                     && opts.mockDate
                     && opts.mockFunctions
                     && opts.mockFunctions.indexOf(name) !== -1;

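With this change, date mocking is no longer tied to the remote literally named 'local': any remote listed in the new localRemotes option, or no remote at all, now qualifies. A small sketch of the new predicate, using the defaults added to the config above:

    // Sketch; the localRemotes default comes from the new config entry.
    const opts = {localRemotes: ['local', 'docker']};
    const isLocalRemote = remote => remote == null
        || opts.localRemotes?.indexOf(remote) !== -1;

    isLocalRemote(undefined);    // true
    isLocalRemote('docker');     // true (previously only 'local' passed)
    isLocalRemote('production'); // false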
@@ -20,7 +20,8 @@ class Run extends Command {
             ci: 'Workaround for continuous integration system',
             network: 'Docker network to attach container to',
             random: 'Whether to use a random container name and port',
-            tmpfs: 'Whether to use tmpfs mount for MySQL data'
+            tmpfs: 'Whether to use tmpfs mount for MySQL data',
+            keep: 'Keep container on failure'
         }
     };

@@ -29,12 +30,14 @@ class Run extends Command {
             ci: 'c',
             network: 'n',
             random: 'r',
-            tmpfs: 't'
+            tmpfs: 't',
+            keep: 'k'
         },
         boolean: [
             'ci',
             'random',
-            'tmpfs'
+            'tmpfs',
+            'keep'
         ]
     };

@@ -118,92 +121,101 @@ class Run extends Command {
             detach: true
         });
         const ct = await docker.run(opts.code, null, runOptions);
         const server = new Server(ct, dbConfig);

-        if (isRandom) {
-            try {
-                const netSettings = await ct.inspect({
-                    format: '{{json .NetworkSettings}}'
-                });
-
-                if (opts.ci) {
-                    dbConfig.host = opts.network
-                        ? netSettings.Networks[opts.network].IPAddress
-                        : netSettings.Gateway;
-                    dbConfig.port = 3306;
-                } else
-                    dbConfig.port = netSettings.Ports['3306/tcp'][0].HostPort;
-            } catch (err) {
-                await server.rm();
-                throw err;
-            }
-        }
-
-        this.emit('waitingDb');
-        await server.wait();
-        const conn = await myt.createConnection();
-
-        // Mock date functions
-
-        this.emit('mockingDate');
-        const mockDateScript = path.join(dumpDir, 'mockDate.sql');
-
-        if (opts.mockDate) {
-            if (!await fs.pathExists(mockDateScript))
-                throw new Error(`Date mock enabled but mock script does not exist: ${mockDateScript}`);
-
-            let sql = await fs.readFile(mockDateScript, 'utf8');
-            sql = sql.replace(/@mockDate/g, SqlString.escape(opts.mockDate));
-            await connExt.multiQuery(conn, sql);
-        }
-
-        // Apply changes
-
-        const hasTriggers = await fs.exists(`${dumpDataDir}/triggers.sql`);
-
-        Object.assign(opts, {
-            triggers: !hasTriggers,
-            commit: true,
-            dbConfig
-        });
-        await myt.run(Push, opts);
-
-        // Apply fixtures
-
-        this.emit('applyingFixtures');
-        const fixturesFiles = [
-            'fixtures.before',
-            '.fixtures',
-            'fixtures.after',
-            'fixtures.local'
-        ]
-        for (const file of fixturesFiles) {
-            if (!await fs.exists(`${dumpDir}/${file}.sql`)) continue;
-            await ct.exec(null, 'docker-import.sh',
-                [`/workspace/dump/${file}`],
-                'spawn',
-                true
-            );
-        }
-
-        // Create triggers
-
-        if (!hasTriggers) {
-            this.emit('creatingTriggers');
-
-            for (const schema of opts.schemas) {
-                const triggersPath = `${opts.routinesDir}/${schema}/triggers`;
-                if (!await fs.pathExists(triggersPath))
-                    continue;
-
-                const triggersDir = await fs.readdir(triggersPath);
-                for (const triggerFile of triggersDir)
-                    await connExt.queryFromFile(conn, `${triggersPath}/${triggerFile}`);
-            }
-        }
-
-        await conn.end();
-        return server;
+        try {
+            const useCustom = opts.ci || opts.network
+
+            if (isRandom || useCustom) {
+                try {
+                    const netSettings = await ct.inspect({
+                        format: '{{json .NetworkSettings}}'
+                    });
+
+                    if (useCustom) {
+                        dbConfig.host = opts.network
+                            ? netSettings.Networks[opts.network].IPAddress
+                            : netSettings.Gateway;
+                        dbConfig.port = 3306;
+                    } else
+                        dbConfig.port = netSettings.Ports['3306/tcp'][0].HostPort;
+                } catch (err) {
+                    await server.rm();
+                    throw err;
+                }
+            }
+
+            this.emit('waitingDb');
+            await server.wait();
+            const conn = await myt.createConnection();
+
+            // Mock date functions
+
+            this.emit('mockingDate');
+            const mockDateScript = path.join(dumpDir, 'mockDate.sql');
+
+            if (opts.mockDate) {
+                if (!await fs.pathExists(mockDateScript))
+                    throw new Error(`Date mock enabled but mock script does not exist: ${mockDateScript}`);
+
+                let sql = await fs.readFile(mockDateScript, 'utf8');
+                sql = sql.replace(/@mockDate/g, SqlString.escape(opts.mockDate));
+                await connExt.multiQuery(conn, sql);
+            }
+
+            // Apply changes
+
+            const hasTriggers = await fs.exists(`${dumpDataDir}/triggers.sql`);
+
+            Object.assign(opts, {
+                triggers: !hasTriggers,
+                commit: true,
+                dbConfig
+            });
+            await myt.run(Push, opts);
+
+            // Apply fixtures
+
+            this.emit('applyingFixtures');
+            const fixturesFiles = [
+                'fixtures.before',
+                '.fixtures',
+                'fixtures.after',
+                'fixtures.local'
+            ]
+            for (const file of fixturesFiles) {
+                if (!await fs.exists(`${dumpDir}/${file}.sql`)) continue;
+                await ct.exec(null, 'docker-import.sh',
+                    [`/workspace/dump/${file}`],
+                    'spawn',
+                    true
+                );
+            }
+
+            // Create triggers
+
+            if (!hasTriggers) {
+                this.emit('creatingTriggers');
+
+                for (const schema of opts.schemas) {
+                    const triggersPath = `${opts.routinesDir}/${schema}/triggers`;
+                    if (!await fs.pathExists(triggersPath))
+                        continue;
+
+                    const triggersDir = await fs.readdir(triggersPath);
+                    for (const triggerFile of triggersDir)
+                        await connExt.queryFromFile(conn, `${triggersPath}/${triggerFile}`);
+                }
+            }
+
+            await conn.end();
+            return server;
+        } catch (err) {
+            try {
+                if (!opts.keep) await ct.rm({force: true});
+            } catch (e) {}
+            throw err;
+        }
     }
 }
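The whole startup sequence is now wrapped in a try/catch so that a failing run no longer leaks a half-initialized container: unless the new keep option is set, the container is force-removed before the error is rethrown. Reduced to its shape:

    // Sketch of the cleanup pattern introduced above.
    try {
        // ... wait for the DB, push changes, load fixtures, create triggers ...
    } catch (err) {
        try {
            if (!opts.keep) await ct.rm({force: true});
        } catch (e) {} // a failed removal must not mask the original error
        throw err;
    }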
@@ -1,6 +1,7 @@
 const Myt = require('./myt');
 const Command = require('./lib/command');
 const fs = require('fs-extra');
+const SqlString = require('sqlstring');

 /**
  * Creates a new version.
@@ -9,18 +10,23 @@ class Version extends Command {
     static usage = {
         description: 'Creates a new version',
         params: {
-            name: 'Name for the new version'
+            name: 'Name for the new version',
+            deprecate: 'Whether to generate sql to delete deprecated objects'
         },
         operand: 'name'
     };

     static opts = {
         alias: {
-            name: 'n'
+            name: 'n',
+            deprecate: 'p'
         },
         string: [
             'name'
         ],
+        boolean: [
+            'deprecate'
+        ],
         default: {
             remote: 'production'
         }
@@ -36,7 +42,8 @@ class Version extends Command {
         },
         versionCreated: function(versionName) {
             console.log(`New version created: ${versionName}`);
-        }
+        },
+        deprecate: 'Generating SQL for deprecated objects deletion.'
     };

     async run(myt, opts) {
@@ -121,10 +128,16 @@ class Version extends Command {
             [opts.code, newVersion]
         );
         await fs.mkdir(newVersionDir);
-        await fs.writeFile(
-            `${newVersionDir}/00-firstScript.sql`,
-            '-- Place your SQL code here\n'
-        );
+
+        if (opts.deprecate) {
+            this.emit('deprecate');
+            await deprecate(conn, opts, newVersionDir);
+        } else
+            await fs.writeFile(
+                `${newVersionDir}/00-firstScript.sql`,
+                '-- Place your SQL code here\n'
+            );

         this.emit('versionCreated', versionFolder);

         await conn.query('COMMIT');
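So a version created with the new flag gets a generated 00-deprecate.sql instead of the empty 00-firstScript.sql placeholder. A hypothetical invocation, reusing the same myt.run() helper the Run command uses for Push:

    // Hypothetical usage; 'cleanupOldColumns' is an example version name.
    await myt.run(Version, {name: 'cleanupOldColumns', deprecate: true});
    // roughly the CLI form: myt version cleanupOldColumns --deprecate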
@@ -137,6 +150,105 @@ class Version extends Command {
         }
     }

+async function deprecate(conn, opts, newVersionDir) {
+    const now = new Date();
+    const minDeprecDate = new Date(now.getTime() - opts.deprecRetentionPeriod * 24 * 60 * 60 * 1000);
+    const deprecMarkRegex = opts.deprecMarkRegex;
+    const deprecCommentRegex = opts.deprecCommentRegex;
+    const deprecDateRegex = opts.deprecDateRegex;
+    const filePath = `${newVersionDir}/00-deprecate.sql`;
+
+    // Generate the drops of the primary keys
+    const [primaryKeys] = await conn.query(`
+        SELECT c.TABLE_SCHEMA 'schema', c.TABLE_NAME 'table'
+            FROM information_schema.COLUMNS c
+                LEFT JOIN information_schema.VIEWS v ON v.TABLE_SCHEMA = c.TABLE_SCHEMA
+                    AND v.TABLE_NAME = c.TABLE_NAME
+                JOIN information_schema.STATISTICS s ON s.TABLE_SCHEMA = c.TABLE_SCHEMA
+                    AND s.TABLE_NAME = c.TABLE_NAME
+                    AND s.COLUMN_NAME = c.COLUMN_NAME
+            WHERE c.COLUMN_NAME REGEXP ? COLLATE utf8mb4_unicode_ci
+                AND c.COLUMN_COMMENT REGEXP ? COLLATE utf8mb4_unicode_ci
+                AND REGEXP_SUBSTR(c.COLUMN_COMMENT, ? COLLATE utf8mb4_unicode_ci) < ?
+                AND v.TABLE_NAME IS NULL
+                AND s.INDEX_NAME = 'PRIMARY'
+    `, [deprecMarkRegex, deprecCommentRegex, deprecDateRegex, minDeprecDate]);
+
+    primaryKeys.map(async row => {
+        await fs.appendFile(
+            filePath,
+            'ALTER TABLE ' + SqlString.escapeId(row.schema, true) + '.' +
+            SqlString.escapeId(row.table, true) + ' DROP PRIMARY KEY;\n'
+        );
+    });
+
+    // Generate the drops of the foreign keys
+    const [foreignKeys] = await conn.query(`
+        SELECT c.TABLE_SCHEMA 'schema', c.TABLE_NAME 'table', kcu.CONSTRAINT_NAME 'constraint'
+            FROM information_schema.COLUMNS c
+                LEFT JOIN information_schema.VIEWS v ON v.TABLE_SCHEMA = c.TABLE_SCHEMA
+                    AND v.TABLE_NAME = c.TABLE_NAME
+                JOIN information_schema.KEY_COLUMN_USAGE kcu ON kcu.TABLE_SCHEMA = c.TABLE_SCHEMA
+                    AND kcu.TABLE_NAME = c.TABLE_NAME
+                    AND kcu.COLUMN_NAME = c.COLUMN_NAME
+            WHERE c.COLUMN_NAME REGEXP ? COLLATE utf8mb4_unicode_ci
+                AND c.COLUMN_COMMENT REGEXP ? COLLATE utf8mb4_unicode_ci
+                AND REGEXP_SUBSTR(c.COLUMN_COMMENT, ? COLLATE utf8mb4_unicode_ci) < ?
+                AND v.TABLE_NAME IS NULL
+                AND kcu.REFERENCED_COLUMN_NAME IS NOT NULL
+    `, [deprecMarkRegex, deprecCommentRegex, deprecDateRegex, minDeprecDate]);
+
+    foreignKeys.map(async row => {
+        await fs.appendFile(
+            filePath,
+            'ALTER TABLE ' + SqlString.escapeId(row.schema, true) + '.' +
+            SqlString.escapeId(row.table, true) + ' DROP FOREIGN KEY ' +
+            SqlString.escapeId(row.constraint, true) + ';\n'
+        );
+    });
+
+    // Generate the drops of the columns
+    const [columns] = await conn.query(`
+        SELECT c.TABLE_SCHEMA 'schema', c.TABLE_NAME 'table', c.COLUMN_NAME 'column'
+            FROM information_schema.COLUMNS c
+                LEFT JOIN information_schema.VIEWS v ON v.TABLE_SCHEMA = c.TABLE_SCHEMA
+                    AND v.TABLE_NAME = c.TABLE_NAME
+                LEFT JOIN information_schema.KEY_COLUMN_USAGE kcu ON kcu.TABLE_SCHEMA = c.TABLE_SCHEMA
+                    AND kcu.TABLE_NAME = c.TABLE_NAME
+                    AND kcu.COLUMN_NAME = c.COLUMN_NAME
+            WHERE c.COLUMN_NAME REGEXP ? COLLATE utf8mb4_unicode_ci
+                AND c.COLUMN_COMMENT REGEXP ? COLLATE utf8mb4_unicode_ci
+                AND REGEXP_SUBSTR(c.COLUMN_COMMENT, ? COLLATE utf8mb4_unicode_ci) < ?
+                AND v.TABLE_NAME IS NULL
+    `, [deprecMarkRegex, deprecCommentRegex, deprecDateRegex, minDeprecDate]);
+
+    columns.map(async row => {
+        await fs.appendFile(
+            filePath,
+            'ALTER TABLE ' + SqlString.escapeId(row.schema, true) + '.' +
+            SqlString.escapeId(row.table, true) + ' DROP COLUMN ' +
+            SqlString.escapeId(row.column, true) + ';\n'
+        );
+    });
+
+    // Generate the drops of the tables
+    const [tables] = await conn.query(`
+        SELECT TABLE_SCHEMA 'schema', TABLE_NAME 'table'
+            FROM information_schema.TABLES
+            WHERE TABLE_NAME REGEXP ? COLLATE utf8mb4_unicode_ci
+                AND TABLE_COMMENT REGEXP ? COLLATE utf8mb4_unicode_ci
+                AND REGEXP_SUBSTR(TABLE_COMMENT, ? COLLATE utf8mb4_unicode_ci) < ?
+    `, [deprecMarkRegex, deprecCommentRegex, deprecDateRegex, minDeprecDate]);
+
+    tables.map(async row => {
+        await fs.appendFile(
+            filePath,
+            'DROP TABLE ' + SqlString.escapeId(row.schema, true) + '.' +
+            SqlString.escapeId(row.table, true) + ';\n'
+        );
+    });
+}
+
 function randomName() {
     const color = random(colors);
     let plant = random(plants);
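For reference, the statements these queries append to 00-deprecate.sql are plain ALTER/DROP commands, with each identifier escaped separately (escapeId's second argument forbids qualified names, so the dot between schema and table is added by hand). A sketch with hypothetical identifiers:

    const SqlString = require('sqlstring');
    // Hypothetical row, shaped like the primary-key query result above.
    const row = {schema: 'vn', table: 'entry__'};
    const stmt = 'ALTER TABLE ' + SqlString.escapeId(row.schema, true) + '.' +
        SqlString.escapeId(row.table, true) + ' DROP PRIMARY KEY;\n';
    console.log(stmt); // ALTER TABLE `vn`.`entry__` DROP PRIMARY KEY;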
myt.js

@@ -19,7 +19,7 @@ class Myt {
         params: {
             remote: 'Name of remote to use',
             workspace: 'The base directory of the project',
-            debug: 'Wether to enable debug mode',
+            debug: 'Whether to enable debug mode',
             version: 'Display the version number and exit',
             help: 'Display this help message'
         }
@@ -1,12 +1,12 @@
 {
   "name": "@verdnatura/myt",
-  "version": "1.6.3",
+  "version": "1.6.13",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@verdnatura/myt",
-      "version": "1.6.3",
+      "version": "1.6.13",
       "license": "GPL-3.0",
       "dependencies": {
         "@sqltools/formatter": "^1.2.5",

@@ -1169,6 +1169,7 @@
       "resolved": "https://registry.npmjs.org/nodegit/-/nodegit-0.27.0.tgz",
       "integrity": "sha512-E9K4gPjWiA0b3Tx5lfWCzG7Cvodi2idl3V5UD2fZrOrHikIfrN7Fc2kWLtMUqqomyoToYJLeIC8IV7xb1CYRLA==",
       "hasInstallScript": true,
+      "license": "MIT",
       "dependencies": {
         "fs-extra": "^7.0.0",
         "got": "^10.7.0",
@@ -1,6 +1,6 @@
 {
   "name": "@verdnatura/myt",
-  "version": "1.6.3",
+  "version": "1.6.13",
   "author": "Verdnatura Levante SL",
   "description": "MySQL version control",
   "license": "GPL-3.0",