3837 - Import data from external service
gitea/salix/pipeline/head: There was a failure building this commit

Joan Sanchez 2022-04-11 11:58:40 +02:00
parent 321d684f34
commit 47a2f76aa4
21 changed files with 43827 additions and 3 deletions

@@ -7,7 +7,7 @@ RUN apt-get update \
     curl \
     ca-certificates \
     gnupg2 \
-    libfontconfig \
+    libfontconfig lftp \
  && apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \
     libdbus-1-3 libexpat1 libfontconfig1 libgbm1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \
     libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \
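The extra lftp package is what the new FTP import relies on: the ftps Node module added to package.json in this commit does not implement the protocol itself, it spawns the lftp binary. A minimal sketch of how that client is driven (host and credentials are placeholders; the real values come from edi.ftpConfig):

const FtpClient = require('ftps');

// Placeholder connection data; updateData() below reads it from edi.ftpConfig.
const ftpClient = new FtpClient({
    host: 'ftp.example.org',
    username: 'user',
    password: 'secret',
    protocol: 'ftp'
});

// Commands are queued first and only run when exec() is called,
// which shells out to the lftp binary installed above.
ftpClient.get('codes/FILE.ZIP', '/tmp/FILE.zip');
ftpClient.exec((err, response) => {
    if (err || response.error)
        console.error(err || response.error);
});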

@@ -0,0 +1,14 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE bucket
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8, @col9, @col10, @col11, @col12)
SET
bucket_id = @col2,
bucket_type_id = @col4,
description = @col5,
x_size = @col6,
y_size = @col7,
z_size = @col8,
entry_date = STR_TO_DATE(@col10, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col11, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col12, '%Y%m%d%H%i')
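Each of these templates is executed through the model's rawSql helper, with the single ? placeholder bound to the path of the CSV file extracted from the downloaded zip (see updateData further down). A minimal sketch, where loadBucket, rawPath and options are hypothetical names standing in for the real call site in dumpData():

const fs = require('fs-extra');
const path = require('path');

// Hypothetical helper: Self is the LoopBack model exposing rawSql,
// rawPath points at the extracted semicolon-separated file and
// options carries the surrounding transaction.
async function loadBucket(Self, rawPath, options) {
    const template = fs.readFileSync(path.join(__dirname, 'sql/bucket.sql'), 'utf8');
    // The ? after LOAD DATA LOCAL INFILE receives the file path, so MySQL
    // streams the file straight into edi.bucket using the column mapping above.
    await Self.rawSql(template, [rawPath], options);
}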

@@ -0,0 +1,10 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE bucket_type
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6)
SET
bucket_type_id = @col2,
description = @col3,
entry_date = STR_TO_DATE(@col4, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col5, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col6, '%Y%m%d%H%i')

@@ -0,0 +1,11 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE `feature`
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7)
SET
item_id = @col2,
feature_type_id = @col3,
feature_value = @col4,
entry_date = STR_TO_DATE(@col5, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col6, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col7, '%Y%m%d%H%i')

@@ -0,0 +1,10 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE genus
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6)
SET
genus_id = @col2,
latin_genus_name = @col3,
entry_date = STR_TO_DATE(@col4, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col5, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col6, '%Y%m%d%H%i')

@@ -0,0 +1,13 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE item
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8, @col9, @col10, @col11, @col12)
SET
id = @col2,
product_name = @col4,
name = @col5,
plant_id = @col7,
group_id = @col9,
entry_date = STR_TO_DATE(@col10, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col11, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col12, '%Y%m%d%H%i')

@@ -0,0 +1,12 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE `item_feature`
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8)
SET
item_id = @col2,
feature = @col3,
regulation_type = @col4,
presentation_order = @col5,
entry_date = STR_TO_DATE(@col6, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col7, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col8, '%Y%m%d%H%i')

@@ -0,0 +1,10 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE item_group
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6)
SET
group_code = @col2,
dutch_group_description = @col3,
entry_date = STR_TO_DATE(@col4, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col5, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col6, '%Y%m%d%H%i')

@@ -0,0 +1,11 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE plant
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8, @col9)
SET
plant_id = @col3,
genus_id = @col4,
specie_id = @col5,
entry_date = STR_TO_DATE(@col7, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col8, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col9, '%Y%m%d%H%i')

@@ -0,0 +1,11 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE specie
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7)
SET
specie_id = @col2,
genus_id = @col3,
latin_species_name = @col4,
entry_date = STR_TO_DATE(@col5, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col6, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col7, '%Y%m%d%H%i')

@@ -0,0 +1,11 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE edi.supplier
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8, @col9, @col10, @col11, @col12, @col13, @col14, @col15, @col16, @col17, @col18, @col19, @col20)
SET
GLNAddressCode = @col2,
supplier_id = @col4,
company_name = @col3,
entry_date = STR_TO_DATE(@col9, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col10, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col11, '%Y%m%d%H%i')

@@ -0,0 +1,11 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE `type`
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7)
SET
type_id = @col2,
type_group_id = @col3,
description = @col4,
entry_date = STR_TO_DATE(@col5, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col6, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col7, '%Y%m%d%H%i')

@@ -0,0 +1,11 @@
LOAD DATA LOCAL INFILE ?
INTO TABLE `value`
FIELDS TERMINATED BY ';'
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7)
SET
type_id = @col2,
type_value = @col3,
type_description = @col4,
entry_date = STR_TO_DATE(@col5, '%Y%m%d'),
expiry_date = IFNULL(NULL,STR_TO_DATE(@col6, '%Y%m%d')),
change_date_time = STR_TO_DATE(@col7, '%Y%m%d%H%i')

@@ -0,0 +1,155 @@
/* eslint no-console: "off" */
const path = require('path');
const fs = require('fs-extra');

module.exports = Self => {
    Self.remoteMethodCtx('updateData', {
        description: 'Updates schema data from external provider',
        accessType: 'WRITE',
        returns: {
            type: 'object',
            root: true
        },
        http: {
            path: `/updateData`,
            verb: 'GET'
        }
    });

    Self.updateData = async() => {
        const models = Self.app.models;

        // Temporary container where the downloaded files are extracted
        const container = await models.TempContainer.container('edi');
        const tempPath = path.join(container.client.root, container.name);

        const [ftpConfig] = await Self.rawSql('SELECT host, user, password FROM edi.ftpConfig');
        console.debug(`Opening FTP connection to ${ftpConfig.host}...\n`);

        // The "ftps" package wraps the lftp binary installed in the Docker image
        const FtpClient = require('ftps');
        const ftpClient = new FtpClient({
            host: ftpConfig.host,
            username: ftpConfig.user,
            password: ftpConfig.password,
            protocol: 'ftp'
        });
        const files = await Self.rawSql('SELECT fileName, toTable, file, updated FROM edi.fileConfig');

        let remoteFile;
        let tempDir;
        let tempFile;
        for (const file of files) {
            try {
                const fileName = file.file;
                console.debug(`Downloading file ${fileName}...`);

                remoteFile = `codes/${fileName}.ZIP`;
                tempDir = `${tempPath}/${fileName}`;
                tempFile = `${tempPath}/${fileName}.zip`;

                await extractFile({
                    ftpClient: ftpClient,
                    file: file,
                    paths: {
                        remoteFile: remoteFile,
                        tempDir: tempDir,
                        tempFile: tempFile
                    }
                });
            } catch (error) {
                if (fs.existsSync(tempFile))
                    await fs.unlink(tempFile);
                await fs.rmdir(tempDir, {recursive: true});

                console.error(error);
            }
        }

        return true;
    };
    async function extractFile({ftpClient, file, paths}) {
        // Queue the zip file download
        ftpClient.get(paths.remoteFile, paths.tempFile);

        // Run the queued download and wait for it, so any error
        // propagates to the caller's try/catch and triggers cleanup there
        await new Promise((resolve, reject) => {
            ftpClient.exec((err, response) => {
                if (err || response.error)
                    return reject(err || new Error(`Error downloading file... ${response.error}`));
                resolve(response);
            });
        });

        const AdmZip = require('adm-zip');
        const zip = new AdmZip(paths.tempFile);
        const entries = zip.getEntries();
        zip.extractAllTo(paths.tempDir, false);

        if (fs.existsSync(paths.tempFile))
            await fs.unlink(paths.tempFile);

        await dumpData({file, entries, paths});

        await fs.rmdir(paths.tempDir, {recursive: true});
    }
    async function dumpData({file, entries, paths}) {
        const toTable = file.toTable;
        const baseName = file.fileName;

        for (const zipEntry of entries) {
            const entryName = zipEntry.entryName;
            console.log(`Reading file ${entryName}...`);

            // The entry name carries a DDMMYY date right before the extension
            const startIndex = (entryName.length - 10);
            const endIndex = (entryName.length - 4);
            const dateStr = entryName.substring(startIndex, endIndex);

            // Format string date to a date object
            let updated = null;
            if (file.updated) {
                updated = new Date(file.updated);
                updated.setHours(0, 0, 0, 0);
            }

            const lastUpdated = new Date();
            lastUpdated.setFullYear(`20${dateStr.substring(4, 6)}`);
            lastUpdated.setMonth(parseInt(dateStr.substring(2, 4)) - 1);
            lastUpdated.setDate(dateStr.substring(0, 2));
            lastUpdated.setHours(0, 0, 0, 0);

            if (updated && lastUpdated <= updated) {
                console.debug(`Table ${toTable} already updated, skipping...`);
                continue;
            }

            console.log('Dumping data...');
            const templatePath = path.join(__dirname, `./sql/${toTable}.sql`);
            const sqlTemplate = fs.readFileSync(templatePath, 'utf8');
            const rawPath = path.join(paths.tempDir, entryName);

            // Declare the transaction outside the try block so the
            // catch block can roll it back
            const tx = await Self.beginTransaction({});
            try {
                const options = {transaction: tx};

                await Self.rawSql(`DELETE FROM edi.${toTable}`, null, options);
                await Self.rawSql(sqlTemplate, [rawPath], options);
                await Self.rawSql(`
                    UPDATE edi.fileConfig
                        SET updated = ?
                        WHERE fileName = ?
                `, [lastUpdated, baseName], options);

                await tx.commit();
            } catch (error) {
                await tx.rollback();
                throw error;
            }

            console.log(`Updated table ${toTable}\n`);
        }
    }
};
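For reference, a hedged example of how the new remote method could be called once the Edi model is wired up; the host, port, /api prefix and the plural Edis path are LoopBack defaults assumed here, not something this commit defines:

const got = require('got'); // already a project dependency

// Assumed REST endpoint and a previously obtained access token.
async function runEdiImport(accessToken) {
    await got('http://localhost:3000/api/Edis/updateData', {
        searchParams: {access_token: accessToken}
    });
}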

@@ -109,6 +109,9 @@
     },
     "OsTicket": {
         "dataSource": "osticket"
+    },
+    "Edi": {
+        "dataSource": "vn"
     }
 }

back/models/edi.js Normal file (+3)
@@ -0,0 +1,3 @@
module.exports = Self => {
    require('../methods/edi/updateData')(Self);
};

back/models/edi.json Normal file (+12)
@@ -0,0 +1,12 @@
{
    "name": "Edi",
    "base": "VnModel",
    "acls": [{
        "property": "validations",
        "accessType": "EXECUTE",
        "principalType": "ROLE",
        "principalId": "$everyone",
        "permission": "ALLOW"
    }]
}

@@ -0,0 +1,2 @@
INSERT INTO salix.ACL (model, property, accessType, permission, principalType, principalId)
VALUES ('Edi', 'updateData', 'WRITE', 'ALLOW', 'ROLE', 'employee');

@@ -15,7 +15,8 @@
         "legacyUtcDateProcessing": false,
         "timezone": "local",
         "connectTimeout": 40000,
-        "acquireTimeout": 20000
+        "acquireTimeout": 20000,
+        "waitForConnections": true
     },
     "osticket": {
         "connector": "memory",

package-lock.json generated Normal file (+43512)

File diff suppressed because it is too large.

@@ -12,10 +12,12 @@
         "node": ">=14"
     },
     "dependencies": {
+        "adm-zip": "^0.5.9",
         "axios": "^0.25.0",
         "bmp-js": "^0.1.0",
         "compression": "^1.7.3",
         "fs-extra": "^5.0.0",
+        "ftps": "^1.2.0",
         "got": "^10.7.0",
         "helmet": "^3.21.2",
         "i18n": "^0.8.4",
@@ -39,7 +41,6 @@
         "require-yaml": "0.0.1",
         "sharp": "^0.27.1",
         "smbhash": "0.0.1",
-        "soap": "^0.35.0",
         "strong-error-handler": "^2.3.2",
         "uuid": "^3.3.3",
         "vn-loopback": "file:./loopback",