myt/myt-run.js

const Myt = require('./myt');
const Command = require('./lib/command');
const Push = require('./myt-push');
const docker = require('./lib/docker');
const fs = require('fs-extra');
const path = require('path');
const Server = require('./lib/server');
const connExt = require('./lib/conn');
const SqlString = require('sqlstring');
/**
 * Builds the database image and runs a container. It only rebuilds the image
 * when the dump has been modified. Some workarounds have been used to avoid a
 * bug with the OverlayFS driver on MacOS.
 */
class Run extends Command {
    static usage = {
        description: 'Build and start local database server container',
        params: {
            ci: 'Workaround for continuous integration system',
            random: 'Whether to use a random container name or port'
        }
    };

    static opts = {
        alias: {
            ci: 'c',
            random: 'r'
        },
        boolean: [
            'ci',
            'random'
        ]
    };

    static reporter = {
        buildingImage: 'Building container image.',
        runningContainer: 'Running container.',
        waitingDb: 'Waiting for MySQL init process.',
        mockingDate: 'Mocking date functions.',
        applyingFixtures: 'Applying fixtures.',
        creatingTriggers: 'Creating triggers.'
    };
    async run(myt, opts) {
        const dumpDir = opts.dumpDir;
        const dumpDataDir = path.join(dumpDir, '.dump');
        const serverDir = path.join(__dirname, 'server');

        if (!await fs.pathExists(`${dumpDataDir}/structure.sql`))
            throw new Error('To run local database you have to create a dump first');

        // Build base image
        this.emit('buildingImage');
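        // The base image can be customized per project: prefer a Dockerfile
        // shipped in the dump directory, otherwise fall back to the bundled
        // Dockerfile.base.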
        let basePath = dumpDir;
        let baseDockerfile = path.join(dumpDir, 'Dockerfile');
        if (!await fs.pathExists(baseDockerfile)) {
            basePath = serverDir;
            baseDockerfile = path.join(serverDir, 'Dockerfile.base');
        }
        await docker.build(basePath, {
            tag: 'myt/base',
            file: baseDockerfile
        }, opts.debug);
        // Build server image
        await docker.build(serverDir, {
            tag: 'myt/server',
            file: path.join(serverDir, 'Dockerfile.server')
        }, opts.debug);

        // Build dump image
        const dumpContext = path.join(opts.mytDir, 'dump');
        await docker.build(dumpContext, {
            tag: opts.code,
            file: path.join(serverDir, 'Dockerfile.dump')
        }, opts.debug);
        // Run container
        this.emit('runningContainer');

        const isRandom = opts.random;
        const dbConfig = opts.dbConfig;

        let runOptions;
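        // With --random, only the container port is published so Docker picks
        // a free host port (resolved below from the network settings); with a
        // fixed name, any leftover container from a previous run is removed
        // first.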
        if (isRandom)
            runOptions = {publish: '3306'};
        else {
            runOptions = {
                name: opts.code,
                publish: `3306:${dbConfig.port}`
            };
            try {
                const server = new Server(new docker.Container(opts.code));
                await server.rm();
            } catch (e) {}
        }
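        // RUN_CHOWN is enabled only on non-Linux hosts, presumably so the
        // container can fix ownership of the bind-mounted workspace (see the
        // OverlayFS/MacOS note in the class doc).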
        const runChown = process.platform != 'linux';
        Object.assign(runOptions, {
            env: `RUN_CHOWN=${runChown}`,
            detach: true,
            volume: `${this.opts.mytDir}:/workspace`
        });

        const ct = await docker.run(opts.code, null, runOptions);
        const server = new Server(ct, dbConfig);
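        // When a random port was published, read the actual host port back
        // from the container; with --ci the Docker gateway address is used as
        // the database host.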
        if (isRandom) {
            try {
                const netSettings = await ct.inspect({
                    format: '{{json .NetworkSettings}}'
                });

                if (opts.ci)
                    dbConfig.host = netSettings.Gateway;
                dbConfig.port = netSettings.Ports['3306/tcp'][0].HostPort;
            } catch (err) {
                await server.rm();
                throw err;
            }
        }

        this.emit('waitingDb');
        await server.wait();
        const conn = await myt.createConnection();

        // Mock date functions
        this.emit('mockingDate');
        const mockDateScript = path.join(dumpDir, 'mockDate.sql');
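        // The mock script is expected to reference @mockDate, which is
        // replaced with the escaped configured date before execution.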
        if (opts.mockDate) {
            if (!await fs.pathExists(mockDateScript))
                throw new Error(`Date mock enabled but mock script does not exist: ${mockDateScript}`);

            let sql = await fs.readFile(mockDateScript, 'utf8');
            sql = sql.replace(/@mockDate/g, SqlString.escape(opts.mockDate));
            await connExt.multiQuery(conn, sql);
        }
        // Apply changes
        const hasTriggers = await fs.exists(`${dumpDataDir}/triggers.sql`);
        Object.assign(opts, {
            triggers: !hasTriggers,
            commit: true,
            dbConfig
        });
        await myt.run(Push, opts);

        // Apply fixtures
        this.emit('applyingFixtures');

        const fixturesFiles = [
            'fixtures.before',
            '.fixtures',
            'fixtures.after',
            'fixtures.local'
        ];
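        // Import every optional fixtures file present in the dump directory
        // through the docker-import.sh helper inside the container.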
        for (const file of fixturesFiles) {
            if (!await fs.exists(`${dumpDir}/${file}.sql`)) continue;
            await ct.exec(null, 'docker-import.sh',
                [`/workspace/dump/${file}`],
                'spawn',
                true
            );
        }
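        // Triggers from the routines directory are only created when the dump
        // itself does not ship a triggers.sql.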
        // Create triggers
        if (!hasTriggers) {
            this.emit('creatingTriggers');

            for (const schema of opts.schemas) {
                const triggersPath = `${opts.routinesDir}/${schema}/triggers`;
                if (!await fs.pathExists(triggersPath))
                    continue;

                const triggersDir = await fs.readdir(triggersPath);
                for (const triggerFile of triggersDir)
                    await connExt.queryFromFile(conn, `${triggersPath}/${triggerFile}`);
            }
        }
        await conn.end();
        return server;
    }
}
module.exports = Run;

if (require.main === module)
    new Myt().cli(Run);
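
// Example invocation (a sketch: flag names come from the `usage`/`opts`
// definitions above, and `myt run` assumes the standard Myt CLI wrapper):
//
//   myt run             # build the images and start the local DB container
//   myt run --random    # use a random container name and published port
//   myt run --ci        # apply the continuous integration workaround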