refs #5563 config split, log on old entity
gitea/mylogger/pipeline/head There was a failure building this commit Details

This commit is contained in:
Juan Ferrer 2023-05-30 20:09:05 +02:00
parent a849049d00
commit c2c1638d1c
8 changed files with 162 additions and 101 deletions

3
.gitignore vendored
View File

@ -1,3 +1,4 @@
node_modules node_modules
zongji zongji
config.*.yml config/config.*.yml
config/logs.*.yml

View File

@ -28,6 +28,7 @@ ARG VERSION
ENV VERSION $VERSION ENV VERSION $VERSION
RUN echo $VERSION RUN echo $VERSION
COPY config config
COPY \ COPY \
LICENSE \ LICENSE \
README.md \ README.md \

View File

@ -1,44 +0,0 @@
code: mylogger
debug: false
testMode: false
pingInterval: 300
flushInterval: 30
restartTimeout: 30
queueFlushDelay: 200
maxBulkLog: 25
maxQueueEvents: 10000
upperCaseTable: true
serverId: 1
srcDb:
host: localhost
port: 3306
user: zongji
password: password
database: util
dstDb:
host: localhost
port: 3306
user: root
password: password
database: util
userField:
- editorFk
showFields:
- name
- description
- nickname
castTypes:
tinyint: boolean
logs:
item:
logTable: itemLog
mainTable: item
tables:
- name: item
exclude:
- image
- supplyResponseFk
types:
isPrinted: boolean
- name: itemTag
relation: itemFk

22
config/config.yml Normal file
View File

@ -0,0 +1,22 @@
code: mylogger
debug: false
testMode: false
pingInterval: 300
flushInterval: 30
restartTimeout: 30
queueFlushDelay: 200
maxBulkLog: 25
maxQueueEvents: 10000
serverId: 1
srcDb:
host: localhost
port: 3306
user: zongji
password: password
database: util
dstDb:
host: localhost
port: 3306
user: root
password: password
database: util

19
config/logs.yml Normal file
View File

@ -0,0 +1,19 @@
upperCaseTable: true
userField:
- editorFk
showFields:
- name
- description
- nickname
castTypes:
tinyint: boolean
logs:
item:
logTable: itemLog
mainTable: item
tables:
- itemTag
- name: item
showField: name
exclude:
- image

View File

@ -17,14 +17,21 @@ module.exports = class MyLogger {
this.queue = []; this.queue = [];
} }
async start() { loadConfig(configName) {
const defaultConfig = require('./config.yml'); const defaultConfig = require(`./config/${configName}.yml`);
const conf = this.conf = Object.assign({}, defaultConfig); const conf = Object.assign({}, defaultConfig);
const localPath = path.join(__dirname, 'config.local.yml'); const localPath = path.join(__dirname, 'config', `${configName}.local.yml`);
if (fs.existsSync(localPath)) { if (fs.existsSync(localPath)) {
const localConfig = require(localPath); const localConfig = require(localPath);
Object.assign(conf, localConfig); Object.assign(conf, localConfig);
} }
return conf;
}
async start() {
this.conf = this.loadConfig('config');
this.logsConf = this.loadConfig('logs');
const {conf, logsConf} = this;
function parseTable(tableString, defaultSchema) { function parseTable(tableString, defaultSchema) {
let name, schema; let name, schema;
@ -59,7 +66,7 @@ module.exports = class MyLogger {
tableMap.set(table.name, tableInfo); tableMap.set(table.name, tableInfo);
} }
const modelName = conf.upperCaseTable const modelName = logsConf.upperCaseTable
? toUpperCamelCase(table.name) ? toUpperCamelCase(table.name)
: table.name; : table.name;
@ -76,14 +83,14 @@ module.exports = class MyLogger {
showField, showField,
relation, relation,
idName, idName,
userField: tableConf.userField || conf.userField userField: tableConf.userField || logsConf.userField
}); });
return tableInfo; return tableInfo;
} }
for (const logName in conf.logs) { for (const logName in logsConf.logs) {
const logConf = conf.logs[logName]; const logConf = logsConf.logs[logName];
const schema = logConf.schema || conf.srcDb.database; const schema = logConf.schema || conf.srcDb.database;
const logInfo = { const logInfo = {
name: logName, name: logName,
@ -140,7 +147,7 @@ module.exports = class MyLogger {
} }
async init() { async init() {
const {conf} = this; const {conf, logsConf} = this;
this.debug('MyLogger', 'Initializing.'); this.debug('MyLogger', 'Initializing.');
this.onErrorListener = err => this.onError(err); this.onErrorListener = err => this.onError(err);
@ -198,7 +205,10 @@ module.exports = class MyLogger {
tableInfo.castTypes.set(col, tableConf.types[col]); tableInfo.castTypes.set(col, tableConf.types[col]);
const [dbCols] = await db.query( const [dbCols] = await db.query(
`SELECT COLUMN_NAME \`col\`, DATA_TYPE \`type\`, COLUMN_DEFAULT \`def\` `SELECT
COLUMN_NAME \`col\`,
DATA_TYPE \`type\`,
COLUMN_DEFAULT \`def\`
FROM information_schema.\`COLUMNS\` FROM information_schema.\`COLUMNS\`
WHERE TABLE_NAME = ? AND TABLE_SCHEMA = ?`, WHERE TABLE_NAME = ? AND TABLE_SCHEMA = ?`,
[table, schema] [table, schema]
@ -208,7 +218,7 @@ module.exports = class MyLogger {
if (!tableInfo.exclude.has(col) && col != tableInfo.userField) if (!tableInfo.exclude.has(col) && col != tableInfo.userField)
tableInfo.columns.set(col, {type, def}); tableInfo.columns.set(col, {type, def});
const castType = conf.castTypes[type]; const castType = logsConf.castTypes[type];
if (castType && !tableInfo.castTypes.has(col)) if (castType && !tableInfo.castTypes.has(col))
tableInfo.castTypes.set(col, castType); tableInfo.castTypes.set(col, castType);
} }
@ -237,7 +247,7 @@ module.exports = class MyLogger {
// Get show field // Get show field
if (!tableConf.showField) { if (!tableConf.showField) {
for (const showField of conf.showFields) { for (const showField of logsConf.showFields) {
if (tableInfo.columns.has(showField)) { if (tableInfo.columns.has(showField)) {
tableInfo.showField = showField; tableInfo.showField = showField;
break; break;
@ -246,6 +256,8 @@ module.exports = class MyLogger {
} }
} }
const showValues = new Map();
for (const [schema, tableMap] of this.schemaMap) for (const [schema, tableMap] of this.schemaMap)
for (const [table, tableInfo] of tableMap) { for (const [table, tableInfo] of tableMap) {
@ -296,17 +308,58 @@ module.exports = class MyLogger {
WHERE TABLE_NAME = ? WHERE TABLE_NAME = ?
AND TABLE_SCHEMA = ? AND TABLE_SCHEMA = ?
AND REFERENCED_TABLE_NAME IS NOT NULL`, AND REFERENCED_TABLE_NAME IS NOT NULL`,
[ [table, schema]
table,
schema
]
); );
tableInfo.relations = new Map(); tableInfo.relations = new Map();
for (const {col, schema, table, column} of relations) for (const {col, schema, table, column} of relations) {
tableInfo.relations.set(col, {schema, table, column}); tableInfo.relations.set(col, {schema, table, column});
let tables = showValues.get(schema);
if (!tables) {
tables = new Map();
showValues.set(schema, tables);
}
if (!tables.get(table)) tables.set(table, null);
}
} }
const showTables = [];
const showFields = logsConf.showFields;
for (const [schema, tableMap] of showValues)
for (const [table] of tableMap)
showTables.push([table, schema]);
const [result] = await db.query(
`SELECT
TABLE_NAME \`table\`,
TABLE_SCHEMA \`schema\`,
COLUMN_NAME \`col\`
FROM information_schema.\`COLUMNS\`
WHERE (TABLE_NAME, TABLE_SCHEMA) IN (?)
AND COLUMN_NAME IN (?)`,
[showTables, showFields]
);
for (const row of result) {
const tables = showValues.get(row.schema);
const showField = tables.get(row.table);
let save;
if (showField != null) {
const newIndex = showFields.indexOf(row.col);
const oldIndex = showFields.indexOf(showField);
save = newIndex < oldIndex;
} else
save = true;
if (save)
tables.set(row.table, row.col);
}
// TODO: End
// Zongji // Zongji
this.onBinlogListener = evt => this.onBinlog(evt); this.onBinlogListener = evt => this.onBinlog(evt);
@ -535,6 +588,27 @@ module.exports = class MyLogger {
} }
} }
/**
 * Compares two values for logging purposes.
 *
 * Primitives are compared with strict equality. The only object type
 * given special treatment is Date: two dates are equal when their
 * timestamps match, with invalid dates (NaN time) normalized to null so
 * that two invalid dates compare equal. Any other pair of objects is
 * considered unequal.
 *
 * @param {*} a First value
 * @param {*} b Second value
 * @returns {boolean} Whether both values are considered equal
 */
function equals(a, b) {
  if (a === b) return true;
  if (a == null || b == null) return false;

  const aType = typeof a;
  if (aType !== typeof b) return false;

  if (aType === 'object'
      && a.constructor === b.constructor
      && a instanceof Date) {
    // FIXME: zongji creates invalid dates for NULL DATE
    // Error is somewhere here: zongji/lib/rows_event.js:129
    const aTime = Number.isNaN(a.getTime()) ? null : a.getTime();
    const bTime = Number.isNaN(b.getTime()) ? null : b.getTime();
    return aTime === bTime;
  }

  return false;
}
if (action == 'update') { if (action == 'update') {
for (const row of evt.rows) { for (const row of evt.rows) {
let nColsChanged = 0; let nColsChanged = 0;
@ -651,6 +725,9 @@ module.exports = class MyLogger {
const logInfo = tableInfo.log; const logInfo = tableInfo.log;
const isDelete = action == 'delete'; const isDelete = action == 'delete';
const isUpdate = action == 'update';
const isMain = tableInfo.isMain;
const relation = tableInfo.relation;
for (const change of changes) { for (const change of changes) {
let newI, oldI; let newI, oldI;
@ -672,13 +749,13 @@ module.exports = class MyLogger {
const created = new Date(evt.timestamp); const created = new Date(evt.timestamp);
const modelName = tableInfo.modelName; const modelName = tableInfo.modelName;
const modelId = row[tableInfo.idName]; const modelId = row[tableInfo.idName];
const modelValue = tableInfo.showField && !tableInfo.isMain const modelValue = tableInfo.showField && !isMain
? row[tableInfo.showField] || null ? row[tableInfo.showField] || null
: null; : null;
const oldInstance = oldI ? JSON.stringify(oldI) : null; const oldInstance = oldI ? JSON.stringify(oldI) : null;
const originFk = !tableInfo.isMain const originFk = !isMain ? row[relation] : modelId;
? row[tableInfo.relation] const originChanged = isUpdate && !isMain
: modelId; && newI[relation] !== undefined;
let deleteRow; let deleteRow;
if (conf.debug) if (conf.debug)
@ -702,17 +779,23 @@ module.exports = class MyLogger {
]); ]);
} }
if (!conf.testMode && (!isDelete || !deleteRow)) { if (!conf.testMode && (!isDelete || !deleteRow)) {
await logInfo.addStmt.execute([ async function log(originFk) {
originFk, return logInfo.addStmt.execute([
row[tableInfo.userField] || null, originFk,
action, row[tableInfo.userField] || null,
created, action,
modelName, created,
oldInstance, modelName,
newI ? JSON.stringify(newI) : null, oldInstance,
modelId, newI ? JSON.stringify(newI) : null,
modelValue modelId,
]); modelValue
]);
}
await log(originFk);
if (originChanged)
await log(oldI[relation]);
} }
} catch (err) { } catch (err) {
if (err.code == 'ER_NO_REFERENCED_ROW_2') { if (err.code == 'ER_NO_REFERENCED_ROW_2') {
@ -785,24 +868,3 @@ function toUpperCamelCase(str) {
match => match.charAt(1).toUpperCase()); match => match.charAt(1).toUpperCase());
return str.charAt(0).toUpperCase() + str.substr(1); return str.charAt(0).toUpperCase() + str.substr(1);
} }
/**
 * Compares two values for logging purposes.
 *
 * Primitives are compared with strict equality. The only object type
 * given special treatment is Date: two dates are equal when their
 * timestamps match, with invalid dates (NaN time) normalized to null so
 * that two invalid dates compare equal. Any other pair of objects is
 * considered unequal.
 *
 * @param {*} a First value
 * @param {*} b Second value
 * @returns {boolean} Whether both values are considered equal
 */
function equals(a, b) {
  if (a === b) return true;
  if (a == null || b == null) return false;

  const aType = typeof a;
  if (aType !== typeof b) return false;

  if (aType === 'object'
      && a.constructor === b.constructor
      && a instanceof Date) {
    // FIXME: zongji creates invalid dates for NULL DATE
    // Error is somewhere here: zongji/lib/rows_event.js:129
    const aTime = Number.isNaN(a.getTime()) ? null : a.getTime();
    const bTime = Number.isNaN(b.getTime()) ? null : b.getTime();
    return aTime === bTime;
  }

  return false;
}

4
package-lock.json generated
View File

@ -1,12 +1,12 @@
{ {
"name": "mylogger", "name": "mylogger",
"version": "0.1.16", "version": "0.1.17",
"lockfileVersion": 2, "lockfileVersion": 2,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "mylogger", "name": "mylogger",
"version": "0.1.16", "version": "0.1.17",
"license": "GPL-3.0", "license": "GPL-3.0",
"dependencies": { "dependencies": {
"colors": "^1.4.0", "colors": "^1.4.0",

View File

@ -1,6 +1,6 @@
{ {
"name": "mylogger", "name": "mylogger",
"version": "0.1.16", "version": "0.1.17",
"author": "Verdnatura Levante SL", "author": "Verdnatura Levante SL",
"description": "MySQL and MariaDB logger using binary log", "description": "MySQL and MariaDB logger using binary log",
"license": "GPL-3.0", "license": "GPL-3.0",