const Myt = require('./myt');
const Command = require('./lib/command');
const Push = require('./myt-push');
const docker = require('./lib/docker');
const fs = require('fs-extra');
const path = require('path');
const Server = require('./lib/server');
const connExt = require('./lib/conn');
const SqlString = require('sqlstring');

/**
 * Builds the database image and runs a container. It only rebuilds the
 * image when fixtures have been modified or when the day on which the
 * image was built is different to today. Some workarounds have been used
 * to avoid a bug with OverlayFS driver on MacOS.
 */
class Run extends Command {
    static usage = {
        description: 'Build and start local database server container',
        params: {
            ci: 'Workaround for continuous integration system',
            random: 'Whether to use a random container name or port'
        }
    };

    static opts = {
        alias: {
            ci: 'c',
            random: 'r'
        },
        boolean: [
            'ci',
            'random'
        ]
    };

    /**
     * Builds the base, server and dump Docker images, starts the database
     * container, applies version changes (via the Push command), loads
     * fixtures and creates triggers.
     *
     * @param {Myt} myt - The Myt application instance (used to create the
     *   DB connection and to run the Push command).
     * @param {Object} opts - Resolved command options: dumpDir, mytDir,
     *   code, dbConfig, debug, random, ci, mockDate, schemas, routinesDir.
     * @returns {Server} The started local database server wrapper.
     * @throws {Error} When no dump has been created yet, or when date
     *   mocking is enabled but the mock script is missing.
     */
    async run(myt, opts) {
        const dumpDir = opts.dumpDir;
        const dumpDataDir = path.join(dumpDir, '.dump');
        const serverDir = path.join(__dirname, 'server');

        // A structure dump is a hard prerequisite: the dump image is built
        // from it, so fail early with a clear message.
        if (!await fs.pathExists(`${dumpDataDir}/structure.sql`))
            throw new Error('To run local database you have to create a dump first');

        // Build base image. A project-local Dockerfile (in the dump dir)
        // takes precedence over the bundled default Dockerfile.base.
        let basePath = dumpDir;
        let baseDockerfile = path.join(dumpDir, 'Dockerfile');
        if (!await fs.pathExists(baseDockerfile)) {
            basePath = serverDir;
            baseDockerfile = path.join(serverDir, 'Dockerfile.base');
        }

        await docker.build(basePath, {
            tag: 'myt/base',
            file: baseDockerfile
        }, opts.debug);

        // Build server image
        await docker.build(serverDir, {
            tag: 'myt/server',
            file: path.join(serverDir, 'Dockerfile.server')
        }, opts.debug);

        // Build dump image, tagged with the project code
        const dumpContext = path.join(opts.mytDir, 'dump');
        await docker.build(dumpContext, {
            tag: opts.code,
            file: path.join(serverDir, 'Dockerfile.dump')
        }, opts.debug);

        // Run container
        const isRandom = opts.random;
        // Copy so that host/port tweaks below don't mutate the shared opts
        const dbConfig = Object.assign({}, opts.dbConfig);

        let runOptions;

        if (isRandom)
            // Let Docker pick a free host port for 3306
            runOptions = {publish: '3306'};
        else {
            runOptions = {
                name: opts.code,
                publish: `3306:${dbConfig.port}`
            };
            // Best effort: remove any leftover container with the same
            // name from a previous run; ignore errors if none exists.
            try {
                const server = new Server(new docker.Container(opts.code));
                await server.rm();
            } catch (e) {}
        }

        // Workaround for OverlayFS ownership issues on non-Linux hosts
        const runChown = process.platform !== 'linux';

        // FIX: was `this.opts.mytDir`; every other access in this method
        // uses the `opts` parameter, which is guaranteed to be in scope.
        Object.assign(runOptions, {
            env: `RUN_CHOWN=${runChown}`,
            detach: true,
            volume: `${opts.mytDir}:/workspace`
        });
        const ct = await docker.run(opts.code, null, runOptions);
        const server = new Server(ct, dbConfig);

        if (isRandom) {
            // Discover the randomly assigned host port (and, on CI, the
            // gateway address) so the client can connect.
            try {
                const netSettings = await ct.inspect({
                    format: '{{json .NetworkSettings}}'
                });

                if (opts.ci)
                    dbConfig.host = netSettings.Gateway;

                dbConfig.port = netSettings.Ports['3306/tcp'][0].HostPort;
            } catch (err) {
                // Don't leave an orphan container behind on failure
                await server.rm();
                throw err;
            }
        }

        await server.wait();
        const conn = await myt.createConnection();

        // Mock date functions
        const mockDateScript = path.join(dumpDir, 'mockDate.sql');

        if (opts.mockDate) {
            // FIX: log moved inside the guard; it previously printed even
            // when date mocking was disabled.
            console.log('Mocking date functions.');
            if (!await fs.pathExists(mockDateScript))
                throw new Error(`Date mock enabled but mock script does not exist: ${mockDateScript}`);

            let sql = await fs.readFile(mockDateScript, 'utf8');
            sql = sql.replace(/@mockDate/g, SqlString.escape(opts.mockDate));
            await connExt.multiQuery(conn, sql);
        }

        // Apply changes
        // fs.pathExists used for consistency (fs.exists is deprecated)
        const hasTriggers = await fs.pathExists(`${dumpDataDir}/triggers.sql`);

        Object.assign(opts, {
            triggers: hasTriggers,
            commit: true,
            dbConfig
        });
        await myt.runCommand(Push, opts);

        // Apply fixtures
        console.log('Applying fixtures.');
        const fixturesFiles = [
            'fixtures.before',
            '.fixtures',
            'fixtures.after',
            'fixtures.local'
        ];
        for (const file of fixturesFiles) {
            if (!await fs.pathExists(`${dumpDir}/${file}.sql`)) continue;
            // Import runs inside the container against the mounted workspace
            await ct.exec(null, 'docker-import.sh',
                [`/workspace/dump/${file}`],
                'spawn',
                true
            );
        }

        // Create triggers (only when not already loaded from the dump)
        if (!hasTriggers) {
            console.log('Creating triggers.');

            for (const schema of opts.schemas) {
                const triggersPath = `${opts.routinesDir}/${schema}/triggers`;
                if (!await fs.pathExists(triggersPath))
                    continue;

                const triggersDir = await fs.readdir(triggersPath);
                for (const triggerFile of triggersDir)
                    await connExt.queryFromFile(conn, `${triggersPath}/${triggerFile}`);
            }
        }

        return server;
    }
}
// Expose the command class so the CLI dispatcher can load it.
module.exports = Run;

// Support direct invocation (`node myt-run.js`) as well as dispatch.
if (require.main === module) {
    new Myt().run(Run);
}