4164 - Endpoint refactor
gitea/salix/pipeline/head: There was a failure building this commit

Joan Sanchez 2022-06-07 13:06:42 +02:00
parent 96f500d161
commit f77f5cee58
5 changed files with 44124 additions and 92 deletions

View File

@@ -19,90 +19,175 @@ module.exports = Self => {
     Self.updateData = async() => {
         const models = Self.app.models;
 
+        // Get files checksum
+        const files = await Self.rawSql('SELECT name, checksum, keyValue FROM edi.fileConfig');
+
+        const updatableFiles = [];
+        for (const file of files) {
+            const fileChecksum = await getChecksum(file);
+
+            if (file.checksum != fileChecksum) {
+                updatableFiles.push({
+                    name: file.name,
+                    checksum: fileChecksum
+                });
+            } else
+                console.debug(`File already updated, skipping...`);
+        }
+
+        if (updatableFiles.length === 0)
+            return false;
+
+        // Download files
         const container = await models.TempContainer.container('edi');
         const tempPath = path.join(container.client.root, container.name);
 
-        // Temporary file clean
-        await fs.rmdir(`${tempPath}/*`, {recursive: true});
-
-        const [ftpConfig] = await Self.rawSql('SELECT host, user, password FROM edi.ftpConfig');
-        console.debug(`Openning FTP connection to ${ftpConfig.host}...\n`);
-
-        const FtpClient = require('ftps');
-        const ftpClient = new FtpClient({
-            host: ftpConfig.host,
-            username: ftpConfig.user,
-            password: ftpConfig.password,
-            procotol: 'ftp'
-        });
-
-        const files = await Self.rawSql('SELECT fileName, toTable, file, updated FROM edi.fileConfig');
-
         let remoteFile;
         let tempDir;
         let tempFile;
 
-        for (const file of files) {
-            try {
-                const fileName = file.file;
-                console.debug(`Downloading file ${fileName}...`);
-                remoteFile = `codes/${fileName}.ZIP`;
-                tempDir = `${tempPath}/${fileName}`;
-                tempFile = `${tempPath}/${fileName}.zip`;
-
-                // if (fs.existsSync(tempFile))
-                //     await fs.unlink(tempFile);
-                // if (fs.existsSync(tempDir))
-                //     await fs.rmdir(tempDir, {recursive: true});
-
-                await extractFile({
-                    ftpClient: ftpClient,
-                    file: file,
-                    paths: {
-                        remoteFile: remoteFile,
-                        tempDir: tempDir,
-                        tempFile: tempFile
-                    }
-                });
-            } catch (error) {
-                if (fs.existsSync(tempFile))
-                    await fs.unlink(tempFile);
-
-                console.error(error);
-            }
+        const fileNames = updatableFiles.map(file => file.name);
+        const tables = await Self.rawSql(`
+            SELECT fileName, toTable, file
+            FROM edi.tableConfig
+            WHERE file IN (?)`, [fileNames]);
+
+        for (const table of tables) {
+            const fileName = table.file;
+
+            console.debug(`Downloading file ${fileName}...`);
+
+            remoteFile = `codes/${fileName}.ZIP`;
+            tempDir = `${tempPath}/${fileName}`;
+            tempFile = `${tempPath}/${fileName}.zip`;
+
+            if (!fs.existsSync(tempFile)) {
+                const downloadOutput = await downloadFile(remoteFile, tempFile);
+                if (downloadOutput.error)
+                    continue;
+            } else
+                console.log('Already downloaded, skipping...');
+
+            console.debug(`Extracting file ${fileName}...`);
+            await extractFile(tempFile, tempDir);
+
+            console.debug(`Updating table ${table.toTable}...`);
+            await dumpData(tempDir, table);
         }
 
+        // Update files checksum
+        for (const file of updatableFiles) {
+            await Self.rawSql(`
+                UPDATE edi.fileConfig
+                    SET checksum = ?
+                WHERE name = ?`,
+            [file.checksum, file.name]);
+        }
+
+        // Clean files
+        if (fs.existsSync(tempPath))
+            fs.rmSync(tempPath, {recursive: true, force: true});
+
         return true;
     };
 
-    async function extractFile({ftpClient, file, paths}) {
-        // Download the zip file
-        ftpClient.get(paths.remoteFile, paths.tempFile);
-
-        // Execute download command
-        ftpClient.exec(async(err, response) => {
-            if (response.error) {
-                console.debug(`Error downloading file... ${response.error}`);
-                return;
-            }
-
-            const AdmZip = require('adm-zip');
-            const zip = new AdmZip(paths.tempFile);
-            const entries = zip.getEntries();
-
-            zip.extractAllTo(paths.tempDir, false);
-
-            await dumpData({file, entries, paths});
-
-            await fs.rmdir(paths.tempDir, {recursive: true});
+    let ftpClient;
+    async function getFtpClient() {
+        if (!ftpClient) {
+            const [ftpConfig] = await Self.rawSql('SELECT host, user, password FROM edi.ftpConfig');
+            console.debug(`Openning FTP connection to ${ftpConfig.host}...\n`);
+
+            const FtpClient = require('ftps');
+
+            ftpClient = new FtpClient({
+                host: ftpConfig.host,
+                username: ftpConfig.user,
+                password: ftpConfig.password,
+                procotol: 'ftp'
+            });
+        }
+
+        return ftpClient;
+    }
+
+    async function getChecksum(file) {
+        const ftpClient = await getFtpClient();
+        console.debug(`Checking checksum for file ${file.name}...`);
+
+        ftpClient.cat(`codes/${file.name}.txt`);
+
+        const response = await new Promise((resolve, reject) => {
+            ftpClient.exec((err, response) => {
+                if (response.error) {
+                    console.debug(`Error downloading checksum file... ${response.error}`);
+                    reject(err);
+                }
+
+                resolve(response);
+            });
+        });
+
+        if (response && response.data) {
+            const fileContents = response.data;
+
+            const rows = fileContents.split('\n');
+            const row = rows[4];
+            const columns = row.split(/\s+/);
+
+            let fileChecksum;
+            if (file.keyValue)
+                fileChecksum = columns[1];
+
+            if (!file.keyValue)
+                fileChecksum = columns[0];
+
+            return fileChecksum;
+        }
+    }
+
+    async function downloadFile(remoteFile, tempFile) {
+        const ftpClient = await getFtpClient();
+
+        ftpClient.get(remoteFile, tempFile);
+
+        return new Promise((resolve, reject) => {
+            ftpClient.exec((err, response) => {
+                if (response.error) {
+                    console.debug(`Error downloading file... ${response.error}`);
+                    reject(err);
+                }
+
+                resolve(response);
+            });
         });
     }
 
-    async function dumpData({file, entries, paths}) {
-        const toTable = file.toTable;
-        const baseName = file.fileName;
+    async function extractFile(tempFile, tempDir) {
+        const JSZip = require('jszip');
+
+        if (!fs.existsSync(tempDir))
+            fs.mkdirSync(tempDir);
+
+        const fileStream = await fs.readFile(tempFile);
+        if (fileStream) {
+            const zip = new JSZip();
+            const zipContents = await zip.loadAsync(fileStream);
+
+            if (!zipContents) return;
+
+            const fileNames = Object.keys(zipContents.files);
+
+            for (const fileName of fileNames) {
+                const fileContent = await zip.file(fileName).async('nodebuffer');
+                const dest = path.join(tempDir, fileName);
+                fs.writeFileSync(dest, fileContent);
+            }
+        }
+    }
+
+    async function dumpData(tempDir, table) {
+        const toTable = table.toTable;
+        const baseName = table.fileName;
 
         const tx = await Self.beginTransaction({});
@@ -112,44 +197,22 @@ module.exports = Self => {
         const tableName = `edi.${toTable}`;
         await Self.rawSql(`DELETE FROM ??`, [tableName], options);
 
-        for (const zipEntry of entries) {
-            const entryName = zipEntry.entryName;
-            console.log(`Reading file ${entryName}...`);
-
-            const startIndex = (entryName.length - 10);
-            const endIndex = (entryName.length - 4);
-            const dateString = entryName.substring(startIndex, endIndex);
-            const lastUpdated = new Date();
-
-            // Format string date to a date object
-            let updated = null;
-            if (file.updated) {
-                updated = new Date(file.updated);
-                updated.setHours(0, 0, 0, 0);
-            }
-
-            lastUpdated.setFullYear(`20${dateString.substring(4, 6)}`);
-            lastUpdated.setMonth(parseInt(dateString.substring(2, 4)) - 1);
-            lastUpdated.setDate(dateString.substring(0, 2));
-            lastUpdated.setHours(0, 0, 0, 0);
-
-            if (updated && lastUpdated <= updated) {
-                console.debug(`Table ${toTable} already updated, skipping...`);
-                continue;
-            }
-
-            console.log('Dumping data...');
+        const files = fs.readdirSync(tempDir)
+            .filter(file => file.startsWith(baseName));
+
+        for (const file of files) {
+            console.log(`Dumping data from file ${file}...`);
 
             const templatePath = path.join(__dirname, `./sql/${toTable}.sql`);
             const sqlTemplate = fs.readFileSync(templatePath, 'utf8');
+            const filePath = path.join(tempDir, file);
 
-            const rawPath = path.join(paths.tempDir, entryName);
-            await Self.rawSql(sqlTemplate, [rawPath], options);
+            await Self.rawSql(sqlTemplate, [filePath], options);
 
             await Self.rawSql(`
-                UPDATE edi.fileConfig
+                UPDATE edi.tableConfig
                     SET updated = ?
                 WHERE fileName = ?
-                `, [lastUpdated, baseName], options);
+                `, [new Date(), baseName], options);
         }
 
         tx.commit();
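
The update flow above decides whether to re-import a file by comparing the checksum stored in edi.fileConfig with one read from a plain-text listing on the FTP server (codes/<name>.txt): getChecksum() takes the fifth line of that listing, splits it on whitespace, and picks the second column when keyValue is set, otherwise the first. A minimal standalone sketch of that selection rule, with a hypothetical listing (the real listing format is not part of this commit):

// Sketch only: the sample listing below is invented; the parsing rules
// (row index 4, whitespace split, keyValue picks the column) come from
// getChecksum() above.
function parseChecksum(listing, keyValue) {
    const rows = listing.split('\n');
    const columns = rows[4].split(/\s+/); // fifth line, whitespace-separated
    return keyValue ? columns[1] : columns[0];
}

const sampleListing = [
    'HEADER', '', '', '',
    'VBN020101  8f3a2c' // hypothetical data row
].join('\n');

console.log(parseChecksum(sampleListing, 1)); // '8f3a2c'
console.log(parseChecksum(sampleListing, 0)); // 'VBN020101'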

View File

@@ -0,0 +1 @@
RENAME TABLE `edi`.`fileConfig` to `edi`.`tableConfig`;

View File

@@ -0,0 +1,22 @@
CREATE TABLE `edi`.`fileConfig`
(
name varchar(25) NOT NULL,
checksum text NULL,
keyValue tinyint(1) default true NOT NULL,
constraint fileConfig_pk
primary key (name)
);
create unique index fileConfig_name_uindex
on fileConfig (name);
INSERT INTO `edi`.`fileConfig` (name, checksum, keyValue)
VALUES ('FEC010104', null, 0);
INSERT INTO `edi`.`fileConfig` (name, checksum, keyValue)
VALUES ('VBN020101', null, 1);
INSERT INTO `edi`.`fileConfig` (name, checksum, keyValue)
VALUES ('florecompc2', null, 1);

43946 package-lock.json generated Normal file

File diff suppressed because it is too large

View File

@@ -12,7 +12,6 @@
         "node": ">=14"
     },
     "dependencies": {
-        "adm-zip": "^0.5.9",
         "axios": "^0.25.0",
         "bmp-js": "^0.1.0",
         "compression": "^1.7.3",
@@ -23,6 +22,7 @@
         "i18n": "^0.8.4",
         "image-type": "^4.1.0",
         "imap": "^0.8.19",
+        "jszip": "^3.10.0",
         "ldapjs": "^2.2.0",
         "loopback": "^3.26.0",
         "loopback-boot": "3.3.1",