const MyVC = require('./myvc');
const fs = require('fs-extra');
const ejs = require('ejs');
const shajs = require('sha.js');
const nodegit = require('nodegit');

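// Pull command: exports the database routines of the selected schemas
// to SQL files under the workspace "routines" directory.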
class Pull {
    get myOpts() {
        return {
            alias: {
                force: 'f',
                checkout: 'c'
            }
        };
    }

    async run(myvc, opts) {
        const conn = await myvc.dbConnect();
        const repo = await myvc.openRepo();

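        // Unless --force is given, refuse to pull while there are pending
        // changes under routines/ that the export could overwrite.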
        if (!opts.force) {
            // Whether the diff contains changes under routines/
            async function hasChanges(diff) {
                if (diff)
                    for (const patch of await diff.patches()) {
                        const match = patch
                            .newFile()
                            .path()
                            .match(/^routines\/(.+)\.sql$/);
                        if (match) return true;
                    }

                return false;
            }

            // Check for unstaged changes

            const unstagedDiff = await myvc.getUnstaged(repo);

            if (await hasChanges(unstagedDiff))
                throw new Error('You have unstaged changes, save them before pull');

            // Check for staged changes

            const stagedDiff = await myvc.getStaged(repo);

            if (await hasChanges(stagedDiff))
                throw new Error('You have staged changes, save them before pull');
        }

        // Checkout to remote commit

        if (opts.checkout) {
            const version = await myvc.fetchDbVersion();

            if (version && version.gitCommit) {
                const now = Date.now();
                const branchName = `myvc-pull_${now}`;
                console.log(`Creating branch '${branchName}' from database commit.`);
                const commit = await repo.getCommit(version.gitCommit);
                const branch = await nodegit.Branch.create(repo,
                    branchName, commit, 1);
                await repo.checkoutBranch(branch);
            }
        }

        // Export routines to SQL files

        console.log(`Incorporating routine changes.`);

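        // Load each exporter's query and rendering template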
        for (const exporter of exporters)
            await exporter.init();

        const exportDir = `${opts.workspace}/routines`;
        if (!await fs.pathExists(exportDir))
            await fs.mkdir(exportDir);

        // Remove exported schemas that are no longer requested
        const schemas = await fs.readdir(exportDir);
        for (const schema of schemas) {
            if (opts.schemas.indexOf(schema) == -1)
                await fs.remove(`${exportDir}/${schema}`);
        }

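        // Load the stored checksums used to detect routine changes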
        let shaSums;
        const shaFile = `${opts.workspace}/.shasums.json`;

        if (await fs.pathExists(shaFile))
            shaSums = JSON.parse(await fs.readFile(shaFile, 'utf8'));
        else
            shaSums = {};

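        // Export the routines of each requested schema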
        for (const schema of opts.schemas) {
            let schemaDir = `${exportDir}/${schema}`;

            if (!await fs.pathExists(schemaDir))
                await fs.mkdir(schemaDir);

            let schemaSums = shaSums[schema];
            if (!schemaSums) schemaSums = shaSums[schema] = {};

            for (const exporter of exporters) {
                const objectType = exporter.objectType;

                let objectSums = schemaSums[objectType];
                if (!objectSums) objectSums = schemaSums[objectType] = {};

                await exporter.export(conn, exportDir, schema, objectSums);
            }
        }

        await fs.writeFile(shaFile, JSON.stringify(shaSums, null, ' '));
    }
}

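// Exports every database object of a given type (function, procedure, view,
// trigger or event) to SQL files, rendered through an EJS template from the
// exporters/ directory.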
class Exporter {
    constructor(objectType) {
        this.objectType = objectType;
        this.dstDir = `${objectType}s`;
    }

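    // Loads the SQL query, the EJS template and the optional formatter
    // module for this object type.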
    async init() {
        const templateDir = `${__dirname}/exporters/${this.objectType}`;
        this.query = await fs.readFile(`${templateDir}.sql`, 'utf8');

        const templateFile = await fs.readFile(`${templateDir}.ejs`, 'utf8');
        this.template = ejs.compile(templateFile);

        if (await fs.pathExists(`${templateDir}.js`))
            this.formatter = require(`${templateDir}.js`);
    }

    async export(conn, exportDir, schema, shaSums) {
        const [res] = await conn.query(this.query, [schema]);
        if (!res.length) return;

        const routineDir = `${exportDir}/${schema}/${this.dstDir}`;
        if (!await fs.pathExists(routineDir))
            await fs.mkdir(routineDir);

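        // Delete exported files whose routine no longer exists in the database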
        const routineSet = new Set();
        for (const params of res)
            routineSet.add(params.name);

        const routines = await fs.readdir(routineDir);
        for (const routineFile of routines) {
            const match = routineFile.match(/^(.*)\.sql$/);
            if (!match) continue;
            const routine = match[1];
            if (!routineSet.has(routine))
                await fs.remove(`${routineDir}/${routine}.sql`);
        }

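        // Render each routine and write its file only when its content changed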
        for (const params of res) {
            if (this.formatter)
                this.formatter(params, schema);

            const routineName = params.name;
            const split = params.definer.split('@');
            params.schema = conn.escapeId(schema);
            params.name = conn.escapeId(routineName, true);
            params.definer =
                `${conn.escapeId(split[0], true)}@${conn.escapeId(split[1], true)}`;

            const sql = this.template(params);
            const routineFile = `${routineDir}/${routineName}.sql`;

            // Skip rewriting the file when its checksum matches the stored one
            const shaSum = shajs('sha256')
                .update(JSON.stringify(sql))
                .digest('hex');

            let changed = true;

            if (await fs.pathExists(routineFile))
                changed = shaSums[routineName] !== shaSum;

            if (changed) {
                await fs.writeFile(routineFile, sql);
                shaSums[routineName] = shaSum;
            }
        }
    }
}

const exporters = [
    new Exporter('function'),
    new Exporter('procedure'),
    new Exporter('view'),
    new Exporter('trigger'),
    new Exporter('event')
];

module.exports = Pull;

if (require.main === module)
    new MyVC().run(Pull);