const Myt = require('./myt');
const Command = require('./lib/command');
const Push = require('./myt-push');
const docker = require('./lib/docker');
const fs = require('fs-extra');
const path = require('path');
const Server = require('./lib/server');
const connExt = require('./lib/conn');
const SqlString = require('sqlstring');

/**
 * Builds the database image and runs a container. It only rebuilds the image
 * when the dump has been modified. Some workarounds have been used to avoid a
 * bug with the OverlayFS driver on macOS.
 */
class Run extends Command {
    static usage = {
        description: 'Build and start local database server container',
        params: {
            ci: 'Workaround for continuous integration system',
            network: 'Docker network to attach container to',
            random: 'Whether to use a random container name and port',
            tmpfs: 'Whether to use tmpfs mount for MySQL data'
        }
    };

    static opts = {
        alias: {
            ci: 'c',
            network: 'n',
            random: 'r',
            tmpfs: 't'
        },
        boolean: [
            'ci',
            'random',
            'tmpfs'
        ]
    };

    static reporter = {
        buildingImage: 'Building container image.',
        runningContainer: 'Running container.',
        waitingDb: 'Waiting for MySQL init process.',
        mockingDate: 'Mocking date functions.',
        applyingFixtures: 'Applying fixtures.',
        creatingTriggers: 'Creating triggers.'
    };

    async run(myt, opts) {
        const dumpDir = opts.dumpDir;
        const dumpDataDir = path.join(dumpDir, '.dump');
        const serverDir = path.join(__dirname, 'server');

        if (!await fs.pathExists(`${dumpDataDir}/structure.sql`))
            throw new Error('To run local database you have to create a dump first');

        // Build base image

        this.emit('buildingImage');

        let basePath = dumpDir;
        let baseDockerfile = path.join(dumpDir, 'Dockerfile');
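
        // Prefer a project-provided Dockerfile; otherwise fall back to the
        // Dockerfile.base bundled in the server directory.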
        if (!await fs.pathExists(baseDockerfile)) {
            basePath = serverDir;
            baseDockerfile = path.join(serverDir, 'Dockerfile.base');
        }

        await docker.build(basePath, {
            tag: 'myt/base',
            file: baseDockerfile
        }, opts.debug);

        // Build server image

        await docker.build(serverDir, {
            tag: 'myt/server',
            file: path.join(serverDir, 'Dockerfile.server')
        }, opts.debug);

        // Build dump image

        const dumpContext = path.join(opts.mytDir, 'dump');
        await docker.build(dumpContext, {
            tag: opts.code,
            file: path.join(serverDir, 'Dockerfile.dump')
        }, opts.debug);

        // Run container

        this.emit('runningContainer');

        const isRandom = opts.random;
        const dbConfig = opts.dbConfig;

        let runOptions;
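
        // With --random, publish container port 3306 on a random host port and
        // let Docker pick the container name; otherwise use a fixed name and
        // port mapping, removing any stale container with the same name first.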
        if (isRandom)
            runOptions = {publish: '3306'};
        else {
            runOptions = {
                name: opts.code,
                publish: `3306:${dbConfig.port}`
            };
            try {
                const server = new Server(new docker.Container(opts.code));
                await server.rm();
            } catch (e) {}
        }

        if (opts.network)
            runOptions.network = opts.network;
        if (opts.tmpfs)
            runOptions.tmpfs = '/var/lib/mysql';

        Object.assign(runOptions, {
            detach: true
        });
        const ct = await docker.run(opts.code, null, runOptions);
        const server = new Server(ct, dbConfig);
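
        // When the port was assigned randomly, inspect the container to find out
        // how to reach it: under --ci use the Docker network or gateway IP on
        // port 3306, otherwise use the host port Docker mapped to 3306/tcp.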
        if (isRandom) {
            try {
                const netSettings = await ct.inspect({
                    format: '{{json .NetworkSettings}}'
                });

                if (opts.ci) {
                    dbConfig.host = opts.network
                        ? netSettings.Networks[opts.network].IPAddress
                        : netSettings.Gateway;
                    dbConfig.port = 3306;
                } else
                    dbConfig.port = netSettings.Ports['3306/tcp'][0].HostPort;
            } catch (err) {
                await server.rm();
                throw err;
            }
        }

        this.emit('waitingDb');
        await server.wait();
        const conn = await myt.createConnection();

        // Mock date functions

        this.emit('mockingDate');
        const mockDateScript = path.join(dumpDir, 'mockDate.sql');
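
        // The mock script is expected to reference the @mockDate placeholder,
        // which is replaced with the escaped mockDate option before execution.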
        if (opts.mockDate) {
            if (!await fs.pathExists(mockDateScript))
                throw new Error(`Date mock enabled but mock script does not exist: ${mockDateScript}`);

            let sql = await fs.readFile(mockDateScript, 'utf8');
            sql = sql.replace(/@mockDate/g, SqlString.escape(opts.mockDate));
            await connExt.multiQuery(conn, sql);
        }

        // Apply changes

        const hasTriggers = await fs.exists(`${dumpDataDir}/triggers.sql`);

        Object.assign(opts, {
            triggers: !hasTriggers,
            commit: true,
            dbConfig
        });
        await myt.run(Push, opts);

        // Apply fixtures

        this.emit('applyingFixtures');
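
        // Import fixture files in a fixed order, skipping those that do not
        // exist; each file is loaded inside the container via docker-import.sh.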
        const fixturesFiles = [
            'fixtures.before',
            '.fixtures',
            'fixtures.after',
            'fixtures.local'
        ];
        for (const file of fixturesFiles) {
            if (!await fs.exists(`${dumpDir}/${file}.sql`)) continue;
            await ct.exec(null, 'docker-import.sh',
                [`/workspace/dump/${file}`],
                'spawn',
                true
            );
        }

        // Create triggers
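        // If the dump did not ship a triggers.sql file, create the triggers from
        // each schema's routines directory instead.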
        if (!hasTriggers) {
            this.emit('creatingTriggers');

            for (const schema of opts.schemas) {
                const triggersPath = `${opts.routinesDir}/${schema}/triggers`;
                if (!await fs.pathExists(triggersPath))
                    continue;

                const triggersDir = await fs.readdir(triggersPath);
                for (const triggerFile of triggersDir)
                    await connExt.queryFromFile(conn, `${triggersPath}/${triggerFile}`);
            }
        }

        await conn.end();
        return server;
    }
}

module.exports = Run;

if (require.main === module)
    new Myt().cli(Run);
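
// Usage sketch (assumes a dump has been created beforehand, e.g. with `myt dump`):
//   myt run                    # build the images and start the container
//   myt run --random --tmpfs   # random name/port, tmpfs-backed MySQL data dir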