Compare commits: dev...2268-db_in (1 commit)

Author | SHA1 | Date
---|---|---
Carlos Jimenez Ruiz | 751df8a492 |
@@ -1,6 +1,4 @@
node_modules
print/node_modules
front
db
e2e
storage
front/node_modules
services
@@ -1,6 +1,6 @@
extends: [eslint:recommended, google, plugin:jasmine/recommended]
parserOptions:
ecmaVersion: 2020
ecmaVersion: 2018
sourceType: "module"
plugins:
- jasmine

@@ -17,7 +17,7 @@ rules:
camelcase: 0
default-case: 0
no-eq-null: 0
no-console: ["warn"]
no-console: ["error"]
no-warning-comments: 0
no-empty: [error, allowEmptyCatch: true]
complexity: 0

@@ -35,8 +35,4 @@ rules:
space-in-parens: ["error", "never"]
jasmine/no-focused-tests: 0
jasmine/prefer-toHaveBeenCalledWith: 0
arrow-spacing: ["error", { "before": true, "after": true }]
no-restricted-syntax:
- "error"
- selector: "NewExpression[callee.name='Date']"
message: "Use Date.vnNew() instead of new Date()."
arrow-spacing: ["error", { "before": true, "after": true }]
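The last hunk above touches a `no-restricted-syntax` rule that bans direct `Date` construction in favor of a project helper. A minimal sketch of what that rule flags, assuming `Date.vnNew()` is defined by the project as the rule message states:
```
// Flagged by the rule (selector: NewExpression[callee.name='Date']):
const created = new Date(); // "Use Date.vnNew() instead of new Date()."

// Allowed: the helper the rule message points to (assumed to be provided by the project).
const createdVn = Date.vnNew();
```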
@@ -2,12 +2,10 @@ coverage
node_modules
dist
storage
.idea
npm-debug.log
.eslintcache
datasources.*.json
print.*.json
db.json
junit.xml
.DS_Store
storage
.DS_Store
@@ -1,33 +0,0 @@
const fs = require('fs');
const path = require('path');

function getCurrentBranchName(p = process.cwd()) {
    if (!fs.existsSync(p)) return false;

    const gitHeadPath = path.join(p, '.git', 'HEAD');

    if (!fs.existsSync(gitHeadPath))
        return getCurrentBranchName(path.resolve(p, '..'));

    const headContent = fs.readFileSync(gitHeadPath, 'utf-8');
    return headContent.trim().split('/')[2];
}

const branchName = getCurrentBranchName();

if (branchName) {
    const msgPath = `.git/COMMIT_EDITMSG`;
    const msg = fs.readFileSync(msgPath, 'utf-8');
    const reference = branchName.match(/^\d+/);

    const referenceTag = `refs #${reference}`;
    if (!msg.includes(referenceTag) && reference) {
        const splitedMsg = msg.split(':');

        if (splitedMsg.length > 1) {
            const finalMsg = splitedMsg[0] + ': ' + referenceTag + splitedMsg.slice(1).join(':');
            fs.writeFileSync(msgPath, finalMsg);
        }
    }
}
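This script derives an issue reference from the current branch name and injects a `refs #<issue>` tag into the commit subject. A minimal sketch of the transformation, using the branch from this compare view and a hypothetical commit message:
```
// Hypothetical inputs; the real script reads them from .git/HEAD and .git/COMMIT_EDITMSG.
const branchName = '2268-db_in';
const msg = 'fix: import db dump';

const reference = branchName.match(/^\d+/);  // ['2268']
const referenceTag = `refs #${reference}`;   // 'refs #2268'

const splitedMsg = msg.split(':');
const finalMsg = splitedMsg[0] + ': ' + referenceTag + splitedMsg.slice(1).join(':');
// finalMsg === 'fix: refs #2268 import db dump'
```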
@@ -1,8 +0,0 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"

echo "Running husky commit-msg hook"
npx --no-install commitlint --edit
echo "Adding reference tag to commit message"
node .husky/addReferenceTag.js
@@ -1,22 +1,8 @@
// Coloque su configuración en este archivo para sobrescribir la configuración predeterminada y de usuario.
{
    // Carácter predeterminado de final de línea.
    "files.eol": "\n",
    "editor.codeActionsOnSave": {
        "source.fixAll.eslint": "explicit"
    },
    "search.useIgnoreFiles": false,
    "editor.defaultFormatter": "dbaeumer.vscode-eslint",
    "eslint.format.enable": true,
    "[javascript]": {
        "editor.defaultFormatter": "dbaeumer.vscode-eslint"
    },
    "[json]": {
        "editor.defaultFormatter": "vscode.json-language-features"
    },
    "cSpell.words": [
        "salix",
        "fdescribe",
        "Loggable"
    ]
}
// Coloque su configuración en este archivo para sobrescribir la configuración predeterminada y de usuario.
{
    // Carácter predeterminado de final de línea.
    "files.eol": "\n",
    "editor.codeActionsOnSave": {
        "source.fixAll.eslint": true
    }
}
CHANGELOG.md (778 changed lines)

@@ -1,778 +0,0 @@
|
|||
# Version 24.38 - 2024-09-17
|
||||
|
||||
### Added 🆕
|
||||
|
||||
- chore: refs #7323 filter data by:jorgep
|
||||
- chore: refs #7323 fix test by:jorgep
|
||||
- chore: refs #7323 worker changes by:jorgep
|
||||
- chore: refs #7323 worker changes wip by:jorgep
|
||||
- chore: refs #7524 add select limit by:jorgep
|
||||
- feat(AccessToken&ACL): refs #7547 upgrade security by:alexm
|
||||
- feat: deleted code and redirect to Lilium by:Jon
|
||||
- feat: refs #4515 New throw buy_checkItem by:guillermo
|
||||
- feat: refs #6650 Added saleGroupLog by:guillermo
|
||||
- feat: refs #6650 new itemShelvingLog by:guillermo
|
||||
- feat: refs #6760 refs #actualiza campo nickname by:jgallego
|
||||
- feat: refs #7277 fdescribe by:jgallego
|
||||
- feat: refs #7277 fit by:jgallego
|
||||
- feat: refs #7277 refundInvoices by:jgallego
|
||||
- feat: refs #7277 test with warehouse by:jgallego
|
||||
- feat: refs #7277 traducciones by:jgallego
|
||||
- feat: refs #7277 transfer addressFk by:jgallego
|
||||
- feat: refs #7532 Requested changes by:guillermo
|
||||
- feat: refs #7564 Added proc by:guillermo
|
||||
- feat: refs #7564 Added ticket_setVolumeItemCost by:guillermo
|
||||
- feat: refs #7564 Added volume column by:guillermo
|
||||
- feat: refs #7564 Fix version by:guillermo
|
||||
- feat: refs #7564 Requested changes by:guillermo
|
||||
- feat: refs #7615 setDeleted by:robert
|
||||
- feat: refs #7650 Added no transfer lines to inventory entry and fixtures by:guillermo
|
||||
- feat: refs #7650 Fix tests by:guillermo
|
||||
- feat: refs #7747 Delete buyUltimate and buyUltimateFromInterval by:ivanm
|
||||
- feat: refs #7759 Changed defined only of vn objects by:guillermo
|
||||
- feat: refs #7759 Changed definer root to vn-admin by:guillermo
|
||||
- feat: refs #7759 Changed name by:guillermo
|
||||
- feat: refs #7759 Deleted version 11163-maroonEucalyptus by:guillermo
|
||||
- feat: refs #7759 Revoke routine grants vn by:guillermo
|
||||
- feat: refs #7811 Added comment by:guillermo
|
||||
- feat: refs #7811 Added new params in datasources.json by:guillermo
|
||||
- feat: refs #7898 Add column "floor" in vn.parking by:ivanm
|
||||
- feat: refs #7898 Modify default by:ivanm
|
||||
- feat: refs #7905 Added new method getBuysCsv by:guillermo
|
||||
- feat: refs #7905 Added param toCsv by:guillermo
|
||||
- feat: refs #7938 remove unnecessary insert in clientLog by:alexm
|
||||
- feat: refs #7953 pullinfo (7953-devToTest_2438) by:alexm
|
||||
- feat(salix): #7671 define isDestiny field in model by:Javier Segarra
|
||||
- feat(salix): refs #7896 update version and changelog (origin/7896_down_devToTest_2436) by:Javier Segarra
|
||||
- feat(salix): refs #7905 #7905 use getBuys toCSV flattened by:Javier Segarra
|
||||
- feat(ssalix): refs #7671 #7671 checkDates by:Javier Segarra
|
||||
- feat(ssalix): refs #7671 #7671 checkDates to present by:Javier Segarra
|
||||
- feat: ticket 215005 Changed acl show transferClient by:guillermo
|
||||
|
||||
### Changed 📦
|
||||
|
||||
- perf: refs #7671 improve showBadDates by:Javier Segarra
|
||||
- perf(salix): refs #7671 #7671 imrpove and revert where changes by:Javier Segarra
|
||||
- refactor: deleted e2e & added back descriptor and summary by:Jon
|
||||
|
||||
### Fixed 🛠️
|
||||
|
||||
- chore: refs #7323 fix test by:jorgep
|
||||
- feat: refs #7650 Added no transfer lines to inventory entry and fixtures by:guillermo
|
||||
- fix by:guillermo
|
||||
- fixes: refs #7760 collection problems by:Carlos Andrés
|
||||
- fix merge dev (7407-workerMedical) by:alexm
|
||||
- fix: refs #6727 No delete log tables data in clean procedures by:guillermo
|
||||
- fix: refs #6897 back and tests by:carlossa
|
||||
- fix: refs #6897 back by:carlossa
|
||||
- fix: refs #6897 fix filter by:carlossa
|
||||
- fix: refs #6897 fix json by:carlossa
|
||||
- fix: refs #6897 travel filter by:carlossa
|
||||
- fix: refs #6897 error test by:jgallego
|
||||
- fix: refs #7323 fetch from right source by:jorgep
|
||||
- fix: refs #7564 Deleted query by:guillermo
|
||||
- fix: refs #7759 Added user 'vn'@'localhost' & grants by:guillermo
|
||||
- fix: refs #7760 collection problems by:Carlos Andrés
|
||||
- fix: refs #7760 tmp.ticketIPT by:Carlos Andrés
|
||||
- fix: refs #7905 added comments to flatten.js by:guillermo
|
||||
- fix: refs ##7905 Handle error by:guillermo
|
||||
- fix(salix): refs #7905 #7905 use right fn to flatten data by:Javier Segarra
|
||||
- perf(salix): refs #7671 #7671 imrpove and revert where changes by:Javier Segarra
|
||||
- refs #6898 fix supplier remove by:carlossa
|
||||
- refs #7407 fix acls fixtures by:carlossa
|
||||
- test: fix connections e2e (7547-accessToken-security) by:alexm
|
||||
- test: refs #7277 fix test proposal by:Javier Segarra
|
||||
- test(salix): refs #7671 #7671 improve and revert where changes by:Javier Segarra
|
||||
|
||||
# Version 24.36 - 2024-09-03
|
||||
|
||||
### Added 🆕
|
||||
|
||||
- chore: refs #7524 WIP limit call by:jorgep
|
||||
- chore: refs #7524 modify ormConfig table col (origin/7524-warmfix-modifyColumn) by:jorgep
|
||||
- feat(update-user): refs #7848 add twoFactor by:alexm
|
||||
- feat: #3199 Requested changes by:guillermo
|
||||
- feat: refs #3199 Added more scopes ticket_recalcByScope by:guillermo
|
||||
- feat: refs #3199 Added one more scope ticket_recalcByScope by:guillermo
|
||||
- feat: refs #3199 Created ticket_recalcItemTaxCountryByScope by:guillermo
|
||||
- feat: refs #3199 Requested changes by:guillermo
|
||||
- feat: refs #7346 add multiple feature by:jgallego
|
||||
- feat: refs #7346 backTest checks new implementation by:jgallego
|
||||
- feat: refs #7346 mas intuitivo by:jgallego
|
||||
- feat: refs #7514 Changes to put srt log (origin/7514-srtLog) by:guillermo
|
||||
- feat: refs #7524 add default limit (origin/7524-limitSelect) by:jorgep
|
||||
- feat: refs #7524 add mock limit on find query by:jorgep
|
||||
- feat: refs #7524 wip remote hooks by:jorgep
|
||||
- feat: refs #7562 Requested changes by:guillermo
|
||||
- feat: refs #7567 Changed time to call event by:guillermo
|
||||
- feat: refs #7567 Requested changes by:guillermo
|
||||
- feat: refs #7710 pr revision by:jgallego
|
||||
- feat: refs #7710 test fixed (origin/7710-cloneWithTicketPackaging, 7710-cloneWithTicketPackaging) by:jgallego
|
||||
- feat: refs #7712 Fix by:guillermo
|
||||
- feat: refs #7712 Unify by:guillermo
|
||||
- feat: refs #7712 sizeLimit (origin/7712-sizeLimit) by:guillermo
|
||||
- feat: refs #7758 Add code mandateType and accountDetailType by:ivanm
|
||||
- feat: refs #7758 Modify code lowerCamelCase and UNIQUE by:ivanm
|
||||
- feat: refs #7758 accountDetailType fix deploy error by:ivanm
|
||||
- feat: refs #7784 Changes in entry-order-pdf by:guillermo
|
||||
- feat: refs #7784 Requested changes by:guillermo
|
||||
- feat: refs #7799 Added Fk in vn.item.itemPackingTypeFk by:guillermo
|
||||
- feat: refs #7800 Added company Fk by:guillermo
|
||||
- feat: refs #7842 Added editorFk in vn.host by:guillermo
|
||||
- feat: refs #7860 Update new packagings (origin/7860-newPackaging) by:guillermo
|
||||
- feat: refs #7862 roadmap new fields by:ivanm
|
||||
- feat: refs #7882 Added quadMindsConfig table by:guillermo
|
||||
|
||||
### Changed 📦
|
||||
|
||||
- refactor: refs #7567 Fix and improvement by:guillermo
|
||||
- refactor: refs #7567 Minor change by:guillermo
|
||||
- refactor: refs #7756 Fix tests by:guillermo
|
||||
- refactor: refs #7798 Drop bi.Greuges_comercial_detail by:guillermo
|
||||
- refactor: refs #7848 adapt to lilium by:alexm
|
||||
|
||||
### Fixed 🛠️
|
||||
|
||||
- feat: refs #7710 test fixed (origin/7710-cloneWithTicketPackaging, 7710-cloneWithTicketPackaging) by:jgallego
|
||||
- feat: refs #7758 accountDetailType fix deploy error by:ivanm
|
||||
- fix(salix): #7283 ItemFixedPrice duplicated (origin/7283_itemFixedPrice_duplicated) by:Javier Segarra
|
||||
- fix: refs #7346 minor error (origin/7346, 7346) by:jgallego
|
||||
- fix: refs #7355 remove and tests accounts (origin/7355-accountMigration2) by:carlossa
|
||||
- fix: refs #7355 remove and tests accounts by:carlossa
|
||||
- fix: refs #7524 default limit select by:jorgep
|
||||
- fix: refs #7756 Foreign keys invoiceOut (origin/7756-fixRefFk) by:guillermo
|
||||
- fix: refs #7756 id 0 by:guillermo
|
||||
- fix: refs #7800 tpvMerchantEnable PRIMARY KEY (origin/7800-tpvMerchantEnable) by:guillermo
|
||||
- fix: refs #7800 tpvMerchantEnable PRIMARY KEY by:guillermo
|
||||
- fix: refs #7916 itemShelving_transfer (origin/test, test) by:guillermo
|
||||
- fix: refs #pako Deleted duplicated version by:guillermo
|
||||
|
||||
# Version 24.34 - 2024-08-20
|
||||
|
||||
### Added 🆕
|
||||
|
||||
- #6900 feat: clear empty by:jorgep
|
||||
- #6900 feat: empty commit by:jorgep
|
||||
- chore: refs #6900 beautify code by:jorgep
|
||||
- chore: refs #6989 add config model by:jorgep
|
||||
- feat workerActivity refs #6078 by:sergiodt
|
||||
- feat: #6453 Refactor (origin/6453-orderConfirm) by:guillermo
|
||||
- feat: #6453 Rollback always split by itemPackingType by:guillermo
|
||||
- feat: deleted worker module code & redirect to Lilium by:Jon
|
||||
- feat: refs #6453 Added new ticket search by:guillermo
|
||||
- feat: refs #6453 Fixes by:guillermo
|
||||
- feat: refs #6453 Minor changes by:guillermo
|
||||
- feat: refs #6453 Requested changes by:guillermo
|
||||
- feat: refs #6900 drop section by:jorgep
|
||||
- feat: refs #7283 order by desc date by:jorgep
|
||||
- feat: refs #7323 add locale by:jorgep
|
||||
- feat: refs #7323 redirect to lilium by:jorgep
|
||||
- feat: refs #7646 delete scannableCodeType by:robert
|
||||
- feat: refs #7713 Created ACLLog by:guillermo
|
||||
- feat: refs #7774 (origin/7774-ticket_cloneWeekly) by:robert
|
||||
- feat: refs #7774 #7774 Changes ticket_cloneWeekly by:guillermo
|
||||
- feat: refs #7774 ticket_cloneWeekly by:robert
|
||||
|
||||
### Changed 📦
|
||||
|
||||
- refactor: refs #6453 Major changes by:guillermo
|
||||
- refactor: refs #6453 Minor changes by:guillermo
|
||||
- refactor: refs #6453 order_confirmWithUser by:guillermo
|
||||
- refactor: refs #7646 #7646 Deleted scannable* variables productionConfig by:guillermo
|
||||
- refactor: refs #7820 Deprecated silexACL by:guillermo
|
||||
|
||||
### Fixed 🛠️
|
||||
|
||||
- #6900 fix: #6900 rectificative filter by:jorgep
|
||||
- #6900 fix: empty commit by:jorgep
|
||||
- fix(orders_filter): add sourceApp accepts by:alexm
|
||||
- fix: refs #6130 commit lint by:pablone
|
||||
- fix: refs #6453 order_confirmWithUser by:guillermo
|
||||
- fix: refs #7283 sql by:jorgep
|
||||
- fix: refs #7713 ACL Log by:guillermo
|
||||
- test: fix claim descriptor redirect to lilium by:alexm
|
||||
- test: fix ticket redirect to lilium by:alexm
|
||||
- test: fix ticket sale e2e by:alexm
|
||||
|
||||
# Version 24.32 - 2024-08-06
|
||||
|
||||
### Added 🆕
|
||||
|
||||
- chore: refs #7197 add supplierActivityFk filter by:jorgep
|
||||
- feat checkExpeditionPrintOut refs #7751 by:sergiodt
|
||||
- feat(defaulter_filter): add department by:alexm
|
||||
- feat: redirect to lilium page not found by:alexm
|
||||
- feat: refactor buyUltimate refs #7736 by:Carlos Andrés
|
||||
- feat: refs #6403 add delete by:pablone
|
||||
- feat: refs #7126 Added manaClaim calc by:guillermo
|
||||
- feat: refs #7126 Refactor and added columns in bs.waste table & proc by:guillermo
|
||||
- feat: refs #7197 filter by correcting by:jorgep
|
||||
- feat: refs #7297 add new columns by:pablone
|
||||
- feat: refs #7356 new parameters in sql for Weekly tickets front by:Jon
|
||||
- feat: refs #7401 redirect lilium by:pablone
|
||||
- feat: refs #7511 Fix tests by:guillermo
|
||||
- feat: refs #7511 Rename to multiConfig tables by:guillermo
|
||||
- feat: refs #7589 Added display (item_valuateInventory) by:guillermo
|
||||
- feat: refs #7589 Added vItemTypeFk & vItemCategoryFk (item_valuateInventory) by:guillermo
|
||||
- feat: refs #7681 Changes by:guillermo
|
||||
- feat: refs #7681 Optimization and refactor by:guillermo
|
||||
- feat: refs #7683 drop temporary table by:robert
|
||||
- feat: refs #7683 productionControl by:robert
|
||||
- feat: refs #7728 Added throw due date by:guillermo
|
||||
- feat: refs #7740 Ticket before update added restriction by:guillermo
|
||||
- feat(salix): #7648 Add field for endpoint as buyLabel report by:Javier Segarra
|
||||
- feat(salix): #7648 remove white line by:Javier Segarra
|
||||
- feat: tabla config dias margen vctos. refs #7728 by:Carlos Andrés
|
||||
|
||||
### Changed 📦
|
||||
|
||||
- eat: refactor buyUltimate refs #7736 by:Carlos Andrés
|
||||
- feat: refactor buyUltimate refs #7736 by:Carlos Andrés
|
||||
- feat: refs #7681 Optimization and refactor by:guillermo
|
||||
- refactor: refs #7126 Requested changes by:guillermo
|
||||
- refactor: refs #7511 Minor change by:guillermo
|
||||
- refactor: refs #7640 Multipleinventory available by:guillermo
|
||||
- refactor: refs #7681 Changes by:guillermo
|
||||
- refactor: refs #7681 Requested changes by:guillermo
|
||||
|
||||
### Fixed 🛠️
|
||||
|
||||
- add prefix (hotFix_liliumRedirection) by:alexm
|
||||
- fix(client_filter): add recovery by:alexm
|
||||
- fix: defaulter filter correct sql (6943-fix_defaulter_filter) by:alexm
|
||||
- fix(deletExpeditions): merge test → dev by:guillermo
|
||||
- fix: refs #6403 fix mrw cancel shipment return type by:pablone
|
||||
- fix: refs #7126 Added addressWaste type by:guillermo
|
||||
- fix: refs #7126 Fix by:guillermo
|
||||
- fix: refs #7126 Minor change by:guillermo
|
||||
- fix: refs #7126 Primary key no unique data by:guillermo
|
||||
- fix: refs #7126 Slow update by:guillermo
|
||||
- fix: refs #7511 Minor change by:guillermo
|
||||
- fix: refs #7546 Deleted insert util.binlogQueue by:guillermo
|
||||
- fix: refs #7811 Variables pm2 by:guillermo
|
||||
- fix: without path by:alexm
|
||||
|
||||
# Version 24.28 - 2024-07-09
|
||||
|
||||
### Added 🆕
|
||||
|
||||
- feat boxPicking refs #7357 by:sergiodt
|
||||
- feat boxPicking refs #7357 (origin/7357_dipole_review) by:sergiodt
|
||||
- feat:concurrency issue refs #6861 by:Carlos Andrés
|
||||
- feat expeditionPalletPrint refs #5210 by:sergiodt
|
||||
- feat front-reservas refs #6861 (origin/6861-Reservas-front) by:sergiodt
|
||||
- feat itemShelving_filterBuyer refs #7023 by:sergiodt
|
||||
- feat itemShelvingLog refs #7168 by:sergiodt
|
||||
- feat itemShelvingSale refs #6861 by:sergiodt
|
||||
- feat: previas con reserva refs #6861 by:Carlos Andrés
|
||||
- feat: previas con sitema de reservas refs #6861 by:Carlos Andrés
|
||||
- feat: previas con sitema de reservas refs #6861 (origin/6861-Pasar-modo-trabajo-de-previa-a-reservas) by:Carlos Andrés
|
||||
- feat refactor setParking REGEXP refs #7575 (origin/7575_setParking_regExp) by:sergiodt
|
||||
- feat: refs #6238 add travelKgPercentage table and model (origin/6238-addPercentage) by:jorgep
|
||||
- feat: refs #6286 check if is teamBoss (origin/6286-setRightWorkerTimeControlAcls) by:jorgep
|
||||
- feat: refs #6701 Fix error by:guillermo
|
||||
- feat: refs #6861 trigger by:sergiodt
|
||||
- feat: refs #7027 mailError managed by:jgallego
|
||||
- feat: refs #7168 Added vRecords param in proc by:guillermo
|
||||
- feat: refs #7168 Minor change by:guillermo
|
||||
- feat: refs #7216 logUnpaid (origin/7216-clientUnpaid) by:jgallego
|
||||
- feat: refs #7216 triggers by:jgallego
|
||||
- feat: refs #7296 by:robert
|
||||
- feat: refs #7296 drop column expeditionTruckFk by:robert
|
||||
- feat: refs #7490 Changes (origin/7490-duaInvoiceInBooking) by:guillermo
|
||||
- feat: refs #7545 Deleted hasIncoterms client column (origin/7545-hasIncoterms) by:guillermo
|
||||
- feat: refs #7555 remove account.password__ by:alexm
|
||||
- feat: return sql check error by:alexm
|
||||
- feat roadmap refs #7195 by:sergiodt
|
||||
- #refs 5890 feat:add assignCollection by:sergiodt
|
||||
- refs#5890 feat: delete trigger and modify getTickets by:sergiodt
|
||||
- refs #5890 feat:itemShelving_add by:sergiodt
|
||||
- refs #5890 feat:reserves by:sergiodt
|
||||
- refs #5890 feat:trigger by:sergiodt
|
||||
- refs #5890 feat: triggers by:sergiodt
|
||||
- refs #6861 feat: getLock by:sergiodt
|
||||
- refs #6861 feat: obsrevation by:sergiodt
|
||||
- refs #6861 feat: previas a reservas by:sergiodt
|
||||
- refs #6861 feat:reserve previos by:sergiodt
|
||||
- refs #6861 feat: reservePrevious by:sergiodt
|
||||
- refs #6861 feat:reserveWithReservation by:sergiodt
|
||||
- refs #6861 feat:sectoCollection reserve by:sergiodt
|
||||
- refs #6861 feat: skipTest by:sergiodt
|
||||
- refs #6861 feat: trigger by:sergiodt
|
||||
|
||||
### Changed 📦
|
||||
|
||||
- feat refactor setParking REGEXP refs #7575 (origin/7575_setParking_regExp) by:sergiodt
|
||||
- refactor: refs #5447 changed models by:Jon
|
||||
- refactor: refs #6238 drop useless round by:jorgep
|
||||
- refactor: refs #6286 replace name by:jorgep
|
||||
- refactor: refs #6701 Refactor claim_ratio_routine by:guillermo
|
||||
- refactor: refs #7490 Added final update by:guillermo
|
||||
- refactor: refs #7490 Deleted update duaInvoiceInBooking by:guillermo
|
||||
- refactor: refs #7490 Minor changes by:guillermo
|
||||
- refactor: refs #7519 Minor change by:guillermo
|
||||
|
||||
### Fixed 🛠️
|
||||
|
||||
- acls, fixtures, models by:carlossa
|
||||
- fix: refs #6238 delete unused SQL script by:jorgep
|
||||
- fix: refs #6238 insert ignore by:jorgep
|
||||
- fix: refs #6238 use scheme by:jorgep
|
||||
- fix: refs #6286 replace id for reason by:jorgep
|
||||
- fix: refs #6286 update WorkerTimeControl permissions by:jorgep
|
||||
- fix(WorkerIncome): refs #7409 fix models by:alexm
|
||||
- refs #5890 fix: dev by:sergiodt
|
||||
- refs #6897 fix entry Salix by:carlossa
|
||||
- refs #6897 fix es.yml by:carlossa
|
||||
- refs #6897 fix redirection by:carlossa
|
||||
- refs #6897 fix remove by:carlossa
|
||||
- refs #7406 fix back by:carlossa
|
||||
- refs #7406 fix pr by:carlossa
|
||||
- refs #7409 fix acls by:carlossa
|
||||
- refs #7409 fix back (origin/7409-workerIncome) by:carlossa
|
||||
- refs #7409 fix pr by:carlossa
|
||||
|
||||
# Version 24.24 - 2024-06-11
|
||||
|
||||
### Added 🆕
|
||||
|
||||
- 6281 feat:buyFk in itemShekving by:sergiodt
|
||||
- 6281 feat:buyFk in itemShelving by:sergiodt
|
||||
- feat: #6408 tests by:jgallego
|
||||
- feat: packaging refs #4021 (origin/4021_packaging) by:sergiodt
|
||||
- feat: refs #6021 add new field by:pablone
|
||||
- feat: refs #6281 change fixtures by:robert
|
||||
- feat: refs # 6408 test ok (origin/6408-rocketChat) by:jgallego
|
||||
- feat: refs #6477 productionConfig add column by:robert
|
||||
- feat: refs #6600 add column (origin/6600-createItemPhotoComment) by:jorgep
|
||||
- feat: refs #6600 Add photoMotivation column to item table and create itemPhotoComment table by:jorgep
|
||||
- feat: refs #6889 add back tests by:jorgep
|
||||
- feat: refs #6889 fixtures & models by:jorgep
|
||||
- feat : refs #6889 wip: check if is productionReviewer or owner by:jorgep
|
||||
- feat: refs #6942 set false isBooed & ledger by:jorgep
|
||||
- feat: refs #6942 toUnbook by:jorgep
|
||||
- feat: refs #6942 xdiario fixtures by:jorgep
|
||||
- feat: refs #7398 Change by:guillermo
|
||||
- feat: refs #7438 Added volume to item_valuateInventory by:guillermo
|
||||
- feat: refs #7438 Requested changes and little changes by:guillermo
|
||||
- refs #6281 feat:buyFk in itemShelving by:sergiodt
|
||||
- feat: refs #6449 item ID is displayed in the sale line by:jorgep
|
||||
|
||||
### Changed 📦
|
||||
|
||||
- refactor: refs #6600 add space by:jorgep
|
||||
- refactor: refs #6889 improve file loading logic by:jorgep
|
||||
- refactor: refs #6889 sale tests e2e by:jorgep
|
||||
- refactor: refs #6889 script sql (origin/6889-dropAddSaleByCode) by:jorgep
|
||||
- refactor: refs #6889 use addSale by:jorgep
|
||||
- refactor: refs #6942 toUnbook & drop buyer acls by:jorgep
|
||||
- refactor: refs #7398 Refactor and change ekt_scan (origin/7398-ektScan) by:guillermo
|
||||
- refactor: refs #7486 Optimized procs by:guillermo
|
||||
|
||||
### Fixed 🛠️
|
||||
|
||||
- feat: refs #6281 change fixtures by:robert
|
||||
- feat: refs #6889 fixtures & models by:jorgep
|
||||
- feat: refs #6942 xdiario fixtures by:jorgep
|
||||
- fix: checking process.env.NODE_ENV by:alexm
|
||||
- fix: en translations by:alexm
|
||||
- fix: move to boot (origin/7421-fix_checking_NODE_ENV, 7421-fix_checking_NODE_ENV) by:alexm
|
||||
- fix: refs #6095 filter by refFk null by:pablone
|
||||
- fix: refs #6600 rollback by:jorgep
|
||||
- fix: refs #6889 allocate 'productionReviewer' role to revision dep. workers & check if is owner or reviewer by:jorgep
|
||||
- fix: refs #6889 check if has collection or sectorCollection by:jorgep
|
||||
- fix: refs #6889 e2e tests by:jorgep
|
||||
- fix: refs #6889 fix back tests by:jorgep
|
||||
- fix: refs #6889 modify fixtures by:jorgep
|
||||
- fix: refs #6889 rollback by:jorgep
|
||||
- fix: refs #6942 acls & back by:jorgep
|
||||
- fix: refs #6942 add deleteById acl by:jorgep
|
||||
- fix: refs #6942 add test & change column name by:jorgep
|
||||
- fix: refs #6942 create invoiceIn acl by:jorgep
|
||||
- fix: refs #6942 delete by:jorgep
|
||||
- fix: refs #6942 drop quotes by:jorgep
|
||||
- fix : refs #6942 remove grafana update priv by:jorgep
|
||||
- fix: refs #6942 revoke update isBooked by:jorgep
|
||||
- fix: refs #6942 toBook/toUnbook by:jorgep
|
||||
- fix: refs #7442 Fix kubernetes deploy by:Juan Ferrer Toribio
|
||||
- fix(salix): refs #7272 #7272 Add aclService in routes.js by:Javier Segarra
|
||||
- fix(salix): refs #7272 #7272 Back validateToken endpoint by:Javier Segarra
|
||||
- fix(salix): refs #7272 #7272 Bug when acl not loaded by:Javier Segarra
|
||||
- fix(salix): refs #7272 #7272 Call validateToken by:Javier Segarra
|
||||
- fix(salix): refs #7272 #7272 Errors when Token not exists by:Javier Segarra
|
||||
- fix(salix): refs #7272 #7272 Front retry calls by:Javier Segarra
|
||||
- fix(salix): refs #7272 #7272 i18n Error by:Javier Segarra
|
||||
- fix(salix): refs #7272 #7272 Remove aclService from auth.js by:Javier Segarra
|
||||
- fix: simplify by:alexm
|
||||
- fix traduction & e2e by:carlossa
|
||||
- refs #6820 fix back by:carlossa
|
||||
- refs #6820 fix pr by:carlossa
|
||||
- refs #6832 fix: ToItem (origin/6832_refactorBackToItem) by:Sergio De la torre
|
||||
- refs #7292 fix tback by:carlossa
|
||||
- refs #7296 fix pr errors, trad by:carlossa
|
||||
- test(salix): refs #7272 #7272 fix renew-token.spec by:Javier Segarra
|
||||
|
||||
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [24.20.01] - 2024-05-14
|
||||
|
||||
### Fixed
|
||||
- (Worker -> time-control) Corrección de errores
|
||||
- (InvoiceOut -> Crear factura) Cuando falla al crear una factura, se devuelve un error
|
||||
- (Worker -> Ver albarán) Ya no aparece la página en blanco
|
||||
|
||||
### Changed
|
||||
- (InvoiceOut) Las facturas ahora muestran el ticket del cual proviene el abono
|
||||
|
||||
## [24.18.01] - 2024-05-07
|
||||
|
||||
## [24.16.01] - 2024-04-18
|
||||
|
||||
## [2414.01] - 2024-04-04
|
||||
|
||||
## [2408.01] - 2024-02-22
|
||||
|
||||
## [2406.01] - 2024-02-08
|
||||
|
||||
### Added
|
||||
|
||||
### Changed
|
||||
|
||||
### Fixed
|
||||
|
||||
## [2404.01] - 2024-01-25
|
||||
|
||||
### Added
|
||||
|
||||
### Changed
|
||||
|
||||
### Fixed
|
||||
|
||||
## [2402.01] - 2024-01-11
|
||||
|
||||
### Added
|
||||
|
||||
### Changed
|
||||
|
||||
### Fixed
|
||||
|
||||
## [2400.01] - 2024-01-04
|
||||
|
||||
### Added
|
||||
|
||||
### Changed
|
||||
|
||||
### Fixed
|
||||
|
||||
## [2350.01] - 2023-12-14
|
||||
|
||||
### Características Añadidas 🆕
|
||||
|
||||
- **Tickets → Expediciones:** Añadido soporte para Viaexpress
|
||||
|
||||
## [2348.01] - 2023-11-30
|
||||
|
||||
### Características Añadidas 🆕
|
||||
|
||||
- **Tickets → Adelantar:** Permite mover lineas sin generar negativos
|
||||
- **Tickets → Adelantar:** Permite modificar la fecha de los tickets
|
||||
- **Trabajadores → Notificaciones:** Nueva sección (lilium)
|
||||
|
||||
### Correcciones 🛠️
|
||||
|
||||
- **Tickets → RocketChat:** Arreglada detección de cambios
|
||||
|
||||
## [2346.01] - 2023-11-16
|
||||
|
||||
### Added
|
||||
|
||||
### Changed
|
||||
|
||||
### Fixed
|
||||
|
||||
## [2342.01] - 2023-11-02
|
||||
|
||||
### Added
|
||||
|
||||
- (Usuarios -> Foto) Se muestra la foto del trabajador
|
||||
|
||||
### Fixed
|
||||
|
||||
- (Usuarios -> Historial) Abre el descriptor del usuario correctamente
|
||||
|
||||
## [2340.01] - 2023-10-05
|
||||
|
||||
## [2338.01] - 2023-09-21
|
||||
|
||||
### Added
|
||||
|
||||
- (Ticket -> Servicios) Se pueden abonar servicios
|
||||
- (Facturas -> Datos básicos) Muestra valores por defecto
|
||||
- (Facturas -> Borrado) Notificación al borrar un asiento ya enlazado en Sage
|
||||
|
||||
### Changed
|
||||
|
||||
- (Trabajadores -> Calendario) Icono de check arreglado cuando pulsas un tipo de dia
|
||||
|
||||
## [2336.01] - 2023-09-07
|
||||
|
||||
## [2334.01] - 2023-08-24
|
||||
|
||||
### Added
|
||||
|
||||
- (General -> Errores) Botón para enviar cau con los datos del error
|
||||
|
||||
## [2332.01] - 2023-08-10
|
||||
|
||||
### Added
|
||||
|
||||
- (Trabajadores -> Gestión documental) Soporte para Docuware
|
||||
- (General -> Agencia) Soporte para Viaexpress
|
||||
- (Tickets -> SMS) Nueva sección en Lilium
|
||||
|
||||
### Changed
|
||||
|
||||
- (General -> Tickets) Devuelve el motivo por el cual no es editable
|
||||
- (Desplegables -> Trabajadores) Mejorados
|
||||
- (General -> Clientes) Razón social y dirección en mayúsculas
|
||||
|
||||
### Fixed
|
||||
|
||||
- (Clientes -> SMS) Al pasar el ratón por encima muestra el mensaje completo
|
||||
|
||||
## [2330.01] - 2023-07-27
|
||||
|
||||
### Added
|
||||
|
||||
- (Artículos -> Vista Previa) Añadido campo "Plástico reciclado"
|
||||
- (Rutas -> Troncales) Nueva sección
|
||||
- (Tickets -> Opciones) Opción establecer peso
|
||||
- (Clientes -> SMS) Nueva sección
|
||||
|
||||
### Changed
|
||||
|
||||
- (General -> Iconos) Añadidos nuevos iconos
|
||||
- (Clientes -> Razón social) Permite crear clientes con la misma razón social según el país
|
||||
|
||||
## [2328.01] - 2023-07-13
|
||||
|
||||
### Added
|
||||
|
||||
- (Clientes -> Morosos) Añadida columna "es trabajador"
|
||||
- (Trabajadores -> Departamentos) Nueva sección
|
||||
- (Trabajadores -> Departamentos) Añadido listado de Trabajadores por departamento
|
||||
- (Trabajadores -> Departamentos) Añadido características de departamento e información
|
||||
|
||||
### Changed
|
||||
|
||||
### Fixed
|
||||
|
||||
- (Trabajadores -> Departamentos) Arreglado búscador
|
||||
|
||||
## [2326.01] - 2023-06-29
|
||||
|
||||
### Added
|
||||
|
||||
- (Entradas -> Correo) Al cambiar el tipo de cambio enviará un correo a las personas designadas
|
||||
- (General -> Históricos) Botón para ver el estado del registro en cada punto
|
||||
- (General -> Históricos) Al filtar por registro se muestra todo el histórial desde que fue creado
|
||||
- (Tickets -> Índice) Permite enviar varios albaranes a Docuware
|
||||
|
||||
### Changed
|
||||
|
||||
- (General -> Históricos) Los registros se muestran agrupados por usuario y entidad
|
||||
- (Facturas -> Facturación global) Optimizada, generación de PDFs y notificaciones en paralelo
|
||||
|
||||
### Fixed
|
||||
|
||||
- (General -> Históricos) Duplicidades eliminadas
|
||||
- (Facturas -> Facturación global) Solucionados fallos que paran el proceso
|
||||
|
||||
## [2324.01] - 2023-06-15
|
||||
|
||||
### Added
|
||||
|
||||
- (Tickets -> Abono) Al abonar permite crear el ticket abono con almacén o sin almmacén
|
||||
- (General -> Desplegables) Mejorada eficiencia de carga de datos
|
||||
- (General -> Históricos) Ahora, ademas de los ids, se muestra la descripión de los atributos
|
||||
- (General -> Históricos) Botón para hacer más ágil mostrar sólo los cambios en un registro
|
||||
- (General -> Históricos) Filtro por cambios
|
||||
|
||||
### Changed
|
||||
|
||||
- (General -> Permisos) Mejorada seguridad
|
||||
- (General -> Históricos) Elementos de la interfaz reorganizados para hacerla más ágil e intuitiva
|
||||
|
||||
### Fixed
|
||||
|
||||
-
|
||||
|
||||
## [2322.01] - 2023-06-01
|
||||
|
||||
### Added
|
||||
|
||||
- (Tickets -> Crear Factura) Al facturar se envia automáticamente el pdf al cliente
|
||||
- (Artículos -> Histórico) Filtro para mostrar lo anterior al inventario
|
||||
- (Trabajadores -> Nuevo trabajador) Permite elegir el método de pago
|
||||
|
||||
### Changed
|
||||
|
||||
- (Trabajadores -> Nuevo trabajador) Los clientes se crean sin 'TR' pero se añade tipo de negocio 'Trabajador'
|
||||
- (Tickets -> Expediciones) Interfaz mejorada y contador añadido
|
||||
|
||||
### Fixed
|
||||
|
||||
- (Tickets -> Líneas) Se permite hacer split de líneas al mismo ticket
|
||||
- (Tickets -> Cambiar estado) Ahora muestra la lista completa de todos los estados
|
||||
|
||||
## [2320.01] - 2023-05-25
|
||||
|
||||
### Added
|
||||
|
||||
- (Tickets -> Crear Factura) Al facturar se envia automáticamente el pdf al cliente
|
||||
|
||||
### Changed
|
||||
|
||||
- (Trabajadores -> Nuevo trabajador) Los clientes se crean sin 'TR' pero se añade tipo de negocio 'Trabajador'
|
||||
|
||||
### Fixed
|
||||
|
||||
-
|
||||
|
||||
## [2318.01] - 2023-05-08
|
||||
|
||||
### Added
|
||||
|
||||
- (Usuarios -> Histórico) Nueva sección
|
||||
- (Roles -> Histórico) Nueva sección
|
||||
- (Trabajadores -> Dar de alta) Permite elegir el método de pago
|
||||
|
||||
### Changed
|
||||
|
||||
- (Artículo -> Precio fijado) Modificado el buscador superior por uno lateral
|
||||
- (Trabajadores -> Dar de alta) Quitada obligatoriedad del iban
|
||||
|
||||
### Fixed
|
||||
|
||||
- (Ticket -> Boxing) Arreglado selección de horas
|
||||
- (Cesta -> Índice) Optimizada búsqueda
|
||||
|
||||
## [2314.01] - 2023-04-20
|
||||
|
||||
### Added
|
||||
|
||||
- (Clientes -> Morosos) Ahora se puede filtrar por las columnas "Desde" y "Fecha Ú. O.". También se envia un email al comercial cuando se añade una nota.
|
||||
- (Monitor tickets) Muestra un icono al lado de la zona, si el ticket es frágil y se envía por agencia
|
||||
- (Facturas recibidas -> Bases negativas) Nueva sección
|
||||
|
||||
### Fixed
|
||||
|
||||
- (Clientes -> Morosos) Ahora se mantienen los elementos seleccionados al hacer sroll.
|
||||
|
||||
## [2312.01] - 2023-04-06
|
||||
|
||||
### Added
|
||||
|
||||
- (Monitor tickets) Muestra un icono al lado de la zona, si el ticket es frágil y se envía por agencia
|
||||
|
||||
### Changed
|
||||
|
||||
- (Monitor tickets) Cuando se filtra por 'Pendiente' ya no muestra los estados de 'Previa'
|
||||
- (Envíos -> Extra comunitarios) Se agrupan las entradas del mismo travel. Añadidos campos Referencia y Importe.
|
||||
- (Envíos -> Índice) Cambiado el buscador superior por uno lateral
|
||||
|
||||
## [2310.01] - 2023-03-23
|
||||
|
||||
### Added
|
||||
|
||||
- (Trabajadores -> Control de horario) Ahora se puede confirmar/no confirmar el registro horario de cada semana desde esta sección
|
||||
|
||||
### Fixed
|
||||
|
||||
- (Clientes -> Listado extendido) Resuelto error al filtrar por clientes inactivos desde la columna "Activo"
|
||||
- (General) Al pasar el ratón por encima del icono de "Borrar" en un campo, se hacía más grande afectando a la interfaz
|
||||
|
||||
## [2308.01] - 2023-03-09
|
||||
|
||||
### Added
|
||||
|
||||
- (Proveedores -> Datos fiscales) Añadido checkbox 'Vies'
|
||||
- (Client -> Descriptor) Nuevo icono $ con barrotes para los clientes con impago
|
||||
- (Trabajador -> Datos Básicos) Añadido nuevo campo Taquilla
|
||||
- (Trabajador -> PDA) Nueva sección
|
||||
|
||||
### Changed
|
||||
|
||||
- (Ticket -> Borrar ticket) Restringido el borrado de tickets con abono
|
||||
|
||||
## [2306.01] - 2023-02-23
|
||||
|
||||
### Added
|
||||
|
||||
- (Tickets -> Datos Básicos) Mensaje de confirmación al intentar generar tickets con negativos
|
||||
- (Artículos) El visible y disponible se calcula a partir de un almacén diferente dependiendo de la sección en la que te encuentres. Se ha añadido un icono que informa sobre a partir de que almacén se esta calculando.
|
||||
|
||||
### Changed
|
||||
|
||||
- (General -> Inicio) Ahora permite recuperar la contraseña tanto con el correo de recuperación como el usuario
|
||||
|
||||
### Fixed
|
||||
|
||||
- (Monitor de tickets) Cuando ordenas por columna, ya no se queda deshabilitado el botón de 'Actualizar'
|
||||
- (Zone -> Días de entrega) Al hacer click en un día, muestra correctamente las zonas
|
||||
- (Artículos) El disponible en la vista previa se muestra correctamente
|
||||
|
||||
## [2304.01] - 2023-02-09
|
||||
|
||||
### Added
|
||||
|
||||
- (Rutas) Al descargar varias facturas se comprime en un zip
|
||||
- (Trabajadores -> Nuevo trabajador) Nueva sección
|
||||
- (Tickets -> Adelantar tickets) Añadidos campos "líneas" y "litros" al ticket origen
|
||||
- (Tickets -> Adelantar tickets) Nuevo icono muestra cuando las agencias de los tickets origen/destino son distintas
|
||||
|
||||
### Changed
|
||||
|
||||
- (Entradas -> Compras) Cambiados los campos "Precio Grouping/Packing" por "PVP" y "Precio" por "Coste"
|
||||
- (Artículos -> Últimas entradas) Cambiados los campos "P.P.U." y "P.P.P." por "PVP"
|
||||
- (Rutas -> Sumario/Tickets) Actualizados campos de los tickets
|
||||
- (Proveedores -> Crear/Editar) Permite añadir Proveedores con la misma razón social pero con países distintos
|
||||
- (Tickets -> Adelantar tickets) Cambiados selectores de estado por checks "Pendiente origen/destino"
|
||||
- (Tickets -> Adelantar tickets) Cambiado stock de destino a origen.
|
||||
|
||||
### Fixed
|
||||
|
||||
- (Artículos -> Etiquetas) Permite intercambiar la relevancia entre dos etiquetas.
|
||||
- (Cliente -> Datos Fiscales) No se permite seleccionar 'Notificar vía e-mail' a los clientes sin e-mail
|
||||
- (Tickets -> Datos básicos) Permite guardar la hora de envío
|
||||
- (Tickets -> Añadir pago) Eliminado "null" en las referencias
|
||||
- (Tickets -> Adelantar tickets) Permite ordenar por importe
|
||||
- (Tickets -> Adelantar tickets) El filtrado por encajado muestra también los tickets sin tipo de encajado
|
||||
|
||||
## [2302.01] - 2023-01-26
|
||||
|
||||
### Added
|
||||
|
||||
- (General -> Inicio) Permite recuperar la contraseña
|
||||
- (Tickets -> Opciones) Subir albarán a Docuware
|
||||
- (Tickets -> Opciones) Enviar correo con PDF de Docuware
|
||||
- (Artículos -> Datos Básicos) Añadido campo Unidades/Caja
|
||||
|
||||
### Changed
|
||||
|
||||
- (Reclamaciones -> Descriptor) Cambiado el campo Agencia por Zona
|
||||
- (Tickets -> Líneas preparadas) Actualizada sección para que sea más visual
|
||||
|
||||
### Fixed
|
||||
|
||||
- (General) Al utilizar el traductor de Google se descuadraban los iconos
|
||||
|
||||
### Removed
|
||||
|
||||
- (Tickets -> Control clientes) Eliminada sección
|
|
@@ -0,0 +1,43 @@
FROM debian:stretch-slim
ENV TZ Europe/Madrid

ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        curl \
        ca-certificates \
        gnupg2 \
        libfontconfig \
    && apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \
        libdbus-1-3 libexpat1 libfontconfig1 libgbm1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \
        libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \
        libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 \
        libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget \
    && curl -sL https://deb.nodesource.com/setup_12.x | bash - \
    && apt-get install -y --no-install-recommends \
        nodejs \
    && apt-get purge -y --auto-remove \
        gnupg2 \
    && rm -rf /var/lib/apt/lists/* \
    && npm -g install pm2

WORKDIR /salix
COPY package.json package-lock.json ./
COPY loopback/package.json loopback/
COPY print/package.json print/
RUN npm install --only=prod
RUN npm --prefix ./print install --only=prod ./print

COPY loopback loopback
COPY back back
COPY modules modules
COPY print print
COPY \
    LICENSE \
    README.md \
    ./

CMD ["pm2-runtime", "./back/process.yml"]

HEALTHCHECK --interval=15s --timeout=10s \
    CMD curl -f http://localhost:3000/api/Applications/status || exit 1
@ -1,258 +1,144 @@
|
|||
#!/usr/bin/env groovy
|
||||
|
||||
def PROTECTED_BRANCH
|
||||
def FROM_GIT
|
||||
def RUN_TESTS
|
||||
def RUN_BUILD
|
||||
|
||||
def BRANCH_ENV = [
|
||||
test: 'test',
|
||||
master: 'production'
|
||||
]
|
||||
|
||||
node {
|
||||
stage('Setup') {
|
||||
env.BACK_REPLICAS = 1
|
||||
env.NODE_ENV = BRANCH_ENV[env.BRANCH_NAME] ?: 'dev'
|
||||
|
||||
PROTECTED_BRANCH = [
|
||||
'dev',
|
||||
'test',
|
||||
'master'
|
||||
].contains(env.BRANCH_NAME)
|
||||
|
||||
FROM_GIT = env.JOB_NAME.startsWith('gitea/')
|
||||
RUN_TESTS = !PROTECTED_BRANCH && FROM_GIT
|
||||
RUN_BUILD = PROTECTED_BRANCH && FROM_GIT
|
||||
|
||||
// https://www.jenkins.io/doc/book/pipeline/jenkinsfile/#using-environment-variables
|
||||
echo "NODE_NAME: ${env.NODE_NAME}"
|
||||
echo "WORKSPACE: ${env.WORKSPACE}"
|
||||
|
||||
configFileProvider([
|
||||
configFile(fileId: 'salix.properties',
|
||||
variable: 'PROPS_FILE')
|
||||
]) {
|
||||
def props = readProperties file: PROPS_FILE
|
||||
props.each {key, value -> env."${key}" = value }
|
||||
props.each {key, value -> echo "${key}: ${value}" }
|
||||
}
|
||||
|
||||
if (PROTECTED_BRANCH) {
|
||||
configFileProvider([
|
||||
configFile(fileId: "salix.branch.${env.BRANCH_NAME}",
|
||||
variable: 'BRANCH_PROPS_FILE')
|
||||
]) {
|
||||
def props = readProperties file: BRANCH_PROPS_FILE
|
||||
props.each {key, value -> env."${key}" = value }
|
||||
props.each {key, value -> echo "${key}: ${value}" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
pipeline {
|
||||
agent any
|
||||
options {
|
||||
disableConcurrentBuilds()
|
||||
}
|
||||
tools {
|
||||
nodejs 'node-v20'
|
||||
}
|
||||
environment {
|
||||
PROJECT_NAME = 'salix'
|
||||
STACK_NAME = "${env.PROJECT_NAME}-${env.BRANCH_NAME}"
|
||||
}
|
||||
stages {
|
||||
stage('Checkout') {
|
||||
steps {
|
||||
script {
|
||||
switch (env.BRANCH_NAME) {
|
||||
case 'master':
|
||||
env.NODE_ENV = 'production'
|
||||
env.BACK_REPLICAS = 4
|
||||
break
|
||||
case 'test':
|
||||
env.NODE_ENV = 'test'
|
||||
env.BACK_REPLICAS = 2
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
configFileProvider([
|
||||
configFile(fileId: "salix.groovy",
|
||||
variable: 'GROOVY_FILE')
|
||||
]) {
|
||||
load env.GROOVY_FILE
|
||||
}
|
||||
|
||||
setEnv()
|
||||
}
|
||||
}
|
||||
stage('Install') {
|
||||
environment {
|
||||
NODE_ENV = ''
|
||||
NODE_ENV = ""
|
||||
}
|
||||
parallel {
|
||||
stage('Back') {
|
||||
steps {
|
||||
sh 'pnpm install --prefer-offline'
|
||||
sh 'node node_modules/puppeteer/install.mjs'
|
||||
}
|
||||
}
|
||||
stage('Print') {
|
||||
when {
|
||||
expression { FROM_GIT }
|
||||
}
|
||||
steps {
|
||||
sh 'pnpm install --prefer-offline --prefix=print'
|
||||
}
|
||||
}
|
||||
stage('Front') {
|
||||
when {
|
||||
expression { FROM_GIT }
|
||||
}
|
||||
steps {
|
||||
sh 'pnpm install --prefer-offline --prefix=front'
|
||||
}
|
||||
steps {
|
||||
nodejs('node-v14') {
|
||||
sh 'npm install --no-audit --prefer-offline'
|
||||
sh 'gulp install --ci'
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Stack') {
|
||||
stage('Test') {
|
||||
when { not { anyOf {
|
||||
branch 'test'
|
||||
branch 'master'
|
||||
}}}
|
||||
environment {
|
||||
NODE_ENV = ""
|
||||
}
|
||||
parallel {
|
||||
stage('Back') {
|
||||
stages {
|
||||
stage('Test') {
|
||||
when {
|
||||
expression { RUN_TESTS }
|
||||
}
|
||||
environment {
|
||||
NODE_ENV = ''
|
||||
}
|
||||
steps {
|
||||
sh 'node back/tests.js --junit'
|
||||
}
|
||||
post {
|
||||
always {
|
||||
junit(
|
||||
testResults: 'junitresults.xml',
|
||||
allowEmptyResults: true
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Build') {
|
||||
when {
|
||||
expression { RUN_BUILD }
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
def packageJson = readJSON file: 'package.json'
|
||||
env.VERSION = "${packageJson.version}-build${env.BUILD_ID}"
|
||||
}
|
||||
sh 'docker-compose build back'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Front') {
|
||||
when {
|
||||
expression { FROM_GIT }
|
||||
}
|
||||
stages {
|
||||
stage('Test') {
|
||||
when {
|
||||
expression { RUN_TESTS }
|
||||
}
|
||||
environment {
|
||||
NODE_ENV = ''
|
||||
}
|
||||
steps {
|
||||
sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=10'
|
||||
}
|
||||
post {
|
||||
always {
|
||||
junit(
|
||||
testResults: 'junit.xml',
|
||||
allowEmptyResults: true
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
stage('Build') {
|
||||
when {
|
||||
expression { RUN_BUILD }
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
def packageJson = readJSON file: 'package.json'
|
||||
env.VERSION = "${packageJson.version}-build${env.BUILD_ID}"
|
||||
}
|
||||
sh 'gulp build'
|
||||
sh 'docker-compose build front'
|
||||
}
|
||||
stage('Frontend') {
|
||||
steps {
|
||||
nodejs('node-v14') {
|
||||
sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=2'
|
||||
}
|
||||
}
|
||||
}
|
||||
// stage('Backend') {
|
||||
// steps {
|
||||
// nodejs('node-v14') {
|
||||
// sh 'gulp launchBackTest --ci'
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
}
|
||||
}
|
||||
stage('Push') {
|
||||
when {
|
||||
expression { RUN_BUILD }
|
||||
}
|
||||
stage('Build') {
|
||||
when { anyOf {
|
||||
branch 'test'
|
||||
branch 'master'
|
||||
}}
|
||||
environment {
|
||||
CREDENTIALS = credentials('docker-registry')
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
def packageJson = readJSON file: 'package.json'
|
||||
env.VERSION = "${packageJson.version}-build${env.BUILD_ID}"
|
||||
nodejs('node-v14') {
|
||||
sh 'gulp build'
|
||||
}
|
||||
sh 'docker login --username $CREDENTIALS_USR --password $CREDENTIALS_PSW $REGISTRY'
|
||||
sh 'docker-compose push'
|
||||
|
||||
dockerBuild()
|
||||
}
|
||||
}
|
||||
stage('Deploy') {
|
||||
when {
|
||||
expression { PROTECTED_BRANCH }
|
||||
when { anyOf {
|
||||
branch 'test'
|
||||
branch 'master'
|
||||
}}
|
||||
environment {
|
||||
DOCKER_HOST = "${env.SWARM_HOST}"
|
||||
}
|
||||
parallel {
|
||||
stage('Database') {
|
||||
steps {
|
||||
configFileProvider([
|
||||
configFile(fileId: "config.${env.NODE_ENV}.ini",
|
||||
variable: 'MYSQL_CONFIG')
|
||||
]) {
|
||||
sh 'mkdir -p db/remotes'
|
||||
sh 'cp "$MYSQL_CONFIG" db/remotes/$NODE_ENV.ini'
|
||||
}
|
||||
steps {
|
||||
sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
|
||||
}
|
||||
}
|
||||
stage('Database') {
|
||||
when { anyOf {
|
||||
branch 'test'
|
||||
branch 'master'
|
||||
}}
|
||||
steps {
|
||||
configFileProvider([
|
||||
configFile(fileId: "config.${env.NODE_ENV}.ini",
|
||||
variable: 'MYSQL_CONFIG')
|
||||
]) {
|
||||
sh 'cp "$MYSQL_CONFIG" db/config.$NODE_ENV.ini'
|
||||
}
|
||||
|
||||
sh 'npx myt push $NODE_ENV --force --commit'
|
||||
}
|
||||
}
|
||||
stage('Kubernetes') {
|
||||
when {
|
||||
expression { FROM_GIT }
|
||||
}
|
||||
steps {
|
||||
script {
|
||||
def packageJson = readJSON file: 'package.json'
|
||||
env.VERSION = "${packageJson.version}-build${env.BUILD_ID}"
|
||||
}
|
||||
withKubeConfig([
|
||||
serverUrl: "$KUBERNETES_API",
|
||||
credentialsId: 'kubernetes',
|
||||
namespace: 'salix'
|
||||
]) {
|
||||
sh 'kubectl set image deployment/salix-back-$BRANCH_NAME salix-back-$BRANCH_NAME=$REGISTRY/salix-back:$VERSION'
|
||||
sh 'kubectl set image deployment/salix-front-$BRANCH_NAME salix-front-$BRANCH_NAME=$REGISTRY/salix-front:$VERSION'
|
||||
}
|
||||
}
|
||||
}
|
||||
sh 'db/import-changes.sh -f $NODE_ENV'
|
||||
}
|
||||
}
|
||||
}
|
||||
post {
|
||||
success {
|
||||
always {
|
||||
script {
|
||||
if (env.BRANCH_NAME == 'master' && FROM_GIT) {
|
||||
env.GIT_COMMIT_MSG = sh(
|
||||
script: 'git log -1 --pretty=%B ${GIT_COMMIT}',
|
||||
returnStdout: true
|
||||
).trim()
|
||||
if (!['master', 'test'].contains(env.BRANCH_NAME)) {
|
||||
try {
|
||||
junit 'junitresults.xml'
|
||||
junit 'junit.xml'
|
||||
} catch (e) {
|
||||
echo e.toString()
|
||||
}
|
||||
}
|
||||
|
||||
String message = env.GIT_COMMIT_MSG
|
||||
int index = message.indexOf('\n')
|
||||
if (index != -1)
|
||||
message = message.substring(0, index)
|
||||
|
||||
setEnv()
|
||||
rocketSend(
|
||||
channel: 'vn-database',
|
||||
message: "*DB version uploaded:* ${message}"
|
||||
+"\n$COMMITTER_EMAIL ($BRANCH_NAME)"
|
||||
+"\n$RUN_DISPLAY_URL",
|
||||
rawMessage: true
|
||||
if (!env.COMMITTER_EMAIL || currentBuild.currentResult == 'SUCCESS') return;
|
||||
try {
|
||||
mail(
|
||||
to: env.COMMITTER_EMAIL,
|
||||
subject: "Pipeline: ${env.JOB_NAME} (${env.BUILD_NUMBER}): ${currentBuild.currentResult}",
|
||||
body: "Check status at ${env.BUILD_URL}"
|
||||
)
|
||||
} catch (e) {
|
||||
echo e.toString()
|
||||
}
|
||||
}
|
||||
}
|
||||
unsuccessful {
|
||||
setEnv()
|
||||
sendEmail()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
README.md (57 changed lines)

@@ -8,27 +8,35 @@ Salix is also the scientific name of a beautiful tree! :)

Required applications.

* Node.js
* Node.js = 14.x LTS
* Docker
* Git
* MYT

You will need to install globally the following items.
```
$ sudo npm install -g jest gulp-cli
```
After installing MYT you will need the following item.

For the usage of jest --watch on macOS.
```
$ apt install libkrb5-dev libssl-dev
$ brew install watchman
```
* [watchman](https://facebook.github.io/watchman/)

## Linux Only Prerequisites

Your user must be in the docker group to use it, so you will need to run this command:
```
$ sudo usermod -a -G docker yourusername
```

## Installing dependencies and launching
## Getting Started // Installing

Pull from repository.

Run these commands in the project root directory to install Node dependencies.
```
$ pnpm install
$ npm install
$ gulp install
```

@@ -46,34 +54,51 @@ $ gulp docker

For client-side unit tests run from project's root.
```
$ npm run test:front
$ jest
```

For server-side unit tests run from project's root.
```
$ npm run test:back
$ gulp backTest
```

For end-to-end tests run from project's root.
```
$ npm run test:e2e
$ gulp e2e
```

## Generate changeLog test → master
```
$ bash changelog.sh
```

## Visual Studio Code extensions

Open Visual Studio Code, press Ctrl+P and paste the following commands.

In Visual Studio Code we use the ESLint extension.
```
ext install dbaeumer.vscode-eslint
```

Gitlens for visualization of code authorship
```
ext install eamodio.gitlens
```

Spanish language pack
```
ext install ms-ceintl.vscode-language-pack-es
```

### Recommended extensions

Material icon Theme
```
ext install pkief.material-icon-theme
```

Material UI Themes
```
ext install equinusocio.vsc-material-theme
```

## Built With

* [angularjs](https://angularjs.org/)
@@ -1,57 +0,0 @@
FROM debian:bookworm-slim
ENV TZ Europe/Madrid

ARG DEBIAN_FRONTEND=noninteractive

# NodeJs

RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        curl \
        ca-certificates \
        gnupg2 \
        graphicsmagick \
    && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
    && apt-get install -y --no-install-recommends nodejs \
    && corepack enable pnpm

# Puppeteer

RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        libfontconfig lftp xvfb gconf-service libasound2 libatk1.0-0 libc6 \
        libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgbm1 \
        libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 \
        libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 \
        libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 \
        libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 \
        fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget

# Extra dependencies

RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        samba-common-bin samba-dsdb-modules \
    && rm -rf /var/lib/apt/lists/*

# Salix

WORKDIR /salix

COPY print/package.json print/pnpm-lock.yaml print/
RUN pnpm install --prod --prefix=print

COPY package.json pnpm-lock.yaml ./
COPY loopback/package.json loopback/
RUN pnpm install --prod

COPY loopback loopback
COPY back back
COPY modules modules
COPY print print
COPY \
    LICENSE \
    README.md \
    ./

CMD ["node", "--tls-min-v1.0", "--openssl-legacy-provider", "./loopback/server/server.js"]
@@ -1,5 +0,0 @@
const baseTime = null; // new Date(2022, 0, 19, 8, 0, 0, 0);
if (baseTime) {
    jasmine.clock().install();
    jasmine.clock().mockDate(baseTime);
}
@@ -0,0 +1,45 @@
module.exports = Self => {
    Self.remoteMethod('acl', {
        description: 'Get the user information and permissions',
        accepts: [
            {
                arg: 'ctx',
                type: 'Object',
                http: {source: 'context'}
            }
        ],
        returns: {
            type: 'Object',
            root: true
        },
        http: {
            path: `/acl`,
            verb: 'GET'
        }
    });

    Self.acl = async function(ctx) {
        let userId = ctx.req.accessToken.userId;
        let models = Self.app.models;

        let user = await models.Account.findById(userId, {
            fields: ['id', 'name', 'nickname', 'email']
        });

        let roles = await models.RoleMapping.find({
            fields: ['roleId'],
            where: {
                principalId: userId,
                principalType: 'USER'
            },
            include: [{
                relation: 'role',
                scope: {
                    fields: ['name']
                }
            }]
        });

        return {roles, user};
    };
};
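The `acl` remote method above exposes the logged-in user and its roles. A hedged sketch of how a client could call it; the `/api/Accounts` prefix and the token header are assumptions, since the diff does not state the model name or REST root:
```
// Assumed URL (Account model, /api REST root); the LoopBack access token id
// goes in the Authorization header.
async function fetchAcl(accessTokenId) {
    const res = await fetch('/api/Accounts/acl', {
        headers: {Authorization: accessTokenId}
    });
    // Shape per the method above: user -> {id, name, nickname, email},
    // roles -> [{roleId, role: {name}}, ...]
    return res.json();
}
```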
@@ -0,0 +1,34 @@

module.exports = Self => {
    Self.remoteMethod('changePassword', {
        description: 'Changes the user password',
        accepts: [
            {
                arg: 'id',
                type: 'Number',
                description: 'The user id',
                http: {source: 'path'}
            }, {
                arg: 'oldPassword',
                type: 'String',
                description: 'The old password',
                required: true
            }, {
                arg: 'newPassword',
                type: 'String',
                description: 'The new password',
                required: true
            }
        ],
        http: {
            path: `/:id/changePassword`,
            verb: 'PATCH'
        }
    });

    Self.changePassword = async function(id, oldPassword, newPassword) {
        await Self.rawSql(`CALL account.user_changePassword(?, ?, ?)`,
            [id, oldPassword, newPassword]);
        await Self.app.models.UserAccount.syncById(id, newPassword);
    };
};
@ -0,0 +1,70 @@
const md5 = require('md5');
const UserError = require('vn-loopback/util/user-error');

module.exports = Self => {
    Self.remoteMethod('login', {
        description: 'Login a user with username/email and password',
        accepts: [
            {
                arg: 'user',
                type: 'String',
                description: 'The user name or email',
                required: true
            }, {
                arg: 'password',
                type: 'String',
                description: 'The password'
            }
        ],
        returns: {
            type: 'object',
            root: true
        },
        http: {
            path: `/login`,
            verb: 'POST'
        }
    });

    Self.login = async function(user, password) {
        let $ = Self.app.models;
        let token;
        let usesEmail = user.indexOf('@') !== -1;

        let userInfo = usesEmail
            ? {email: user}
            : {username: user};
        let instance = await $.User.findOne({
            fields: ['username', 'password'],
            where: userInfo
        });

        let where = usesEmail
            ? {email: user}
            : {name: user};
        let account = await Self.findOne({
            fields: ['active', 'password'],
            where
        });

        let validCredentials = instance && (
            await instance.hasPassword(password) ||
            account.password == md5(password || '')
        );

        if (validCredentials) {
            if (!account.active)
                throw new UserError('User disabled');

            try {
                await $.UserAccount.sync(instance.username, password);
            } catch (err) {
                console.warn(err);
            }
        }

        let loginInfo = Object.assign({password}, userInfo);
        token = await $.User.login(loginInfo, 'user');
        return {token: token.id};
    };
};
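For orientation, a hedged sketch of how the resulting endpoint might be called from client code; the base URL, mount path and credentials below are placeholders and not taken from this change:

```js
const axios = require('axios');

async function loginExample() {
    // Hypothetical local instance; host, port, path and credentials are illustrative.
    const {data} = await axios.post('http://localhost:3000/api/Accounts/login', {
        user: 'developer',
        password: 'nightmare'
    });

    // The remote method returns {token: <accessToken id>}
    return data.token;
}
```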
@ -0,0 +1,25 @@
module.exports = Self => {
    Self.remoteMethod('logout', {
        description: 'Logout a user with access token',
        accepts: [
            {
                arg: 'ctx',
                type: 'Object',
                http: {source: 'context'}
            }
        ],
        returns: {
            type: 'Boolean',
            root: true
        },
        http: {
            path: `/logout`,
            verb: 'POST'
        }
    });

    Self.logout = async function(ctx) {
        await Self.app.models.User.logout(ctx.req.accessToken.id);
        return true;
    };
};
@ -0,0 +1,29 @@

module.exports = Self => {
    Self.remoteMethod('setPassword', {
        description: 'Sets the user password',
        accepts: [
            {
                arg: 'id',
                type: 'Number',
                description: 'The user id',
                http: {source: 'path'}
            }, {
                arg: 'newPassword',
                type: 'String',
                description: 'The new password',
                required: true
            }
        ],
        http: {
            path: `/:id/setPassword`,
            verb: 'PATCH'
        }
    });

    Self.setPassword = async function(id, newPassword) {
        await Self.rawSql(`CALL account.user_setPassword(?, ?)`,
            [id, newPassword]);
        await Self.app.models.UserAccount.syncById(id, newPassword);
    };
};
@ -0,0 +1,9 @@
const app = require('vn-loopback/server/server');

describe('account changePassword()', () => {
    it('should throw an error when old password is wrong', async() => {
        let req = app.models.Account.changePassword(null, 1, 'wrongOldPass', 'newPass');

        await expectAsync(req).toBeRejected();
    });
});
@ -0,0 +1,41 @@
const app = require('vn-loopback/server/server');

describe('account login()', () => {
    describe('when credentials are correct', () => {
        it('should return the token', async() => {
            let login = await app.models.Account.login('salesAssistant', 'nightmare');
            let accessToken = await app.models.AccessToken.findById(login.token);
            let ctx = {req: {accessToken: accessToken}};

            expect(login.token).toBeDefined();

            await app.models.Account.logout(ctx);
        });

        it('should return the token if the user doesnt exist but the client does', async() => {
            let login = await app.models.Account.login('PetterParker', 'nightmare');
            let accessToken = await app.models.AccessToken.findById(login.token);
            let ctx = {req: {accessToken: accessToken}};

            expect(login.token).toBeDefined();

            await app.models.Account.logout(ctx);
        });
    });

    describe('when credentials are incorrect', () => {
        it('should throw a 401 error', async() => {
            let error;

            try {
                await app.models.Account.login('IDontExist', 'TotallyWrongPassword');
            } catch (e) {
                error = e;
            }

            expect(error).toBeDefined();
            expect(error.statusCode).toBe(401);
            expect(error.code).toBe('LOGIN_FAILED');
        });
    });
});
@ -0,0 +1,42 @@
const app = require('vn-loopback/server/server');

describe('account logout()', () => {
    it('should logout and remove token after valid login', async() => {
        let loginResponse = await app.models.Account.login('buyer', 'nightmare');
        let accessToken = await app.models.AccessToken.findById(loginResponse.token);
        let ctx = {req: {accessToken: accessToken}};

        let logoutResponse = await app.models.Account.logout(ctx);
        let tokenAfterLogout = await app.models.AccessToken.findById(loginResponse.token);

        expect(logoutResponse).toBeTrue();
        expect(tokenAfterLogout).toBeNull();
    });

    it('should throw a 401 error when token is invalid', async() => {
        let error;
        let ctx = {req: {accessToken: {id: 'invalidToken'}}};

        try {
            response = await app.models.Account.logout(ctx);
        } catch (e) {
            error = e;
        }

        expect(error).toBeDefined();
        expect(error.statusCode).toBe(401);
    });

    it('should throw an error when no token is passed', async() => {
        let error;
        let ctx = {req: {accessToken: null}};

        try {
            response = await app.models.Account.logout(ctx);
        } catch (e) {
            error = e;
        }

        expect(error).toBeDefined();
    });
});
@ -0,0 +1,15 @@
const app = require('vn-loopback/server/server');

describe('account changePassword()', () => {
    it('should throw an error when password does not meet requirements', async() => {
        let req = app.models.Account.setPassword(1, 'insecurePass');

        await expectAsync(req).toBeRejected();
    });

    it('should update password when it passes requirements', async() => {
        let req = app.models.Account.setPassword(1, 'Very$ecurePa22.');

        await expectAsync(req).toBeResolved();
    });
});
@ -0,0 +1,17 @@
module.exports = Self => {
    Self.remoteMethod('validateToken', {
        description: 'Validates the current logged user token',
        returns: {
            type: 'Boolean',
            root: true
        },
        http: {
            path: `/validateToken`,
            verb: 'GET'
        }
    });

    Self.validateToken = async function() {
        return true;
    };
};
@ -22,19 +22,15 @@ module.exports = Self => {

    Self.latest = async filter => {
        const conn = Self.dataSource.connector;
        const minDate = Date.vnNew();
        const minDate = new Date();
        minDate.setFullYear(minDate.getFullYear() - 1);

        const where = {dated: {gte: minDate}};
        filter = mergeFilters(filter, {where});

        const stmt = new ParameterizedSQL(
            `SELECT * FROM (`);
        stmt.merge('SELECT * FROM campaign');
            `SELECT * FROM campaign`);
        stmt.merge(conn.makeWhere(filter.where));
        stmt.merge('ORDER BY dated ASC');
        stmt.merge('LIMIT 10000000000000000000');
        stmt.merge(') sub');
        stmt.merge('GROUP BY code');
        stmt.merge(conn.makePagination(filter));
@ -1,9 +1,9 @@
const models = require('vn-loopback/server/server').models;
const app = require('vn-loopback/server/server');

describe('campaign latest()', () => {
    it('should return the campaigns from the last year', async() => {
        const now = Date.vnNew();
        const result = await models.Campaign.latest();
        const now = new Date();
        const result = await app.models.Campaign.latest();
        const randomIndex = Math.floor(Math.random() * result.length);
        const campaignDated = result[randomIndex].dated;
@ -12,9 +12,9 @@ describe('campaign latest()', () => {
    });

    it('should return the campaigns from the current year', async() => {
        const now = Date.vnNew();
        const now = new Date();
        const currentYear = now.getFullYear();
        const result = await models.Campaign.latest({
        const result = await app.models.Campaign.latest({
            where: {dated: {like: `%${currentYear}%`}}
        });
@ -1,10 +1,10 @@
const {models} = require('vn-loopback/server/server');
const app = require('vn-loopback/server/server');

describe('campaign upcoming()', () => {
    it('should return the upcoming campaign but from the last year', async() => {
        const response = await models.Campaign.upcoming();
        const response = await app.models.Campaign.upcoming();
        const campaignDated = response.dated;
        const now = Date.vnNew();
        const now = new Date();

        expect(campaignDated).toEqual(jasmine.any(Date));
        expect(campaignDated).toBeLessThanOrEqual(now);
@ -14,7 +14,7 @@ module.exports = Self => {
    });

    Self.upcoming = async() => {
        const minDate = Date.vnNew();
        const minDate = new Date();
        minDate.setFullYear(minDate.getFullYear() - 1);

        return Self.findOne({
@ -1,55 +0,0 @@
|
|||
const axios = require('axios');
|
||||
const tokenLifespan = 10;
|
||||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('getServiceAuth', {
|
||||
description: 'Authenticates with the service and request a new token',
|
||||
accessType: 'READ',
|
||||
accepts: [],
|
||||
returns: {
|
||||
type: 'object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/getServiceAuth`,
|
||||
verb: 'GET'
|
||||
}
|
||||
});
|
||||
|
||||
Self.getServiceAuth = async() => {
|
||||
if (!this.login)
|
||||
this.login = await requestToken();
|
||||
|
||||
if (!this.login) return;
|
||||
|
||||
if (Date.vnNow() > this.login.expires)
|
||||
this.login = await requestToken();
|
||||
|
||||
return this.login;
|
||||
};
|
||||
|
||||
/**
|
||||
* Requests a new Rocketchat token
|
||||
*/
|
||||
async function requestToken() {
|
||||
const models = Self.app.models;
|
||||
const chatConfig = await models.ChatConfig.findOne();
|
||||
|
||||
const {data} = await axios.post(`${chatConfig.api}/login`, {
|
||||
user: chatConfig.user,
|
||||
password: chatConfig.password
|
||||
});
|
||||
|
||||
const requestData = data.data;
|
||||
if (requestData) {
|
||||
return {
|
||||
host: chatConfig.host,
|
||||
api: chatConfig.api,
|
||||
auth: {
|
||||
userId: requestData.userId,
|
||||
token: requestData.authToken
|
||||
},
|
||||
expires: Date.vnNow() + (1000 * 60 * tokenLifespan)
|
||||
};
|
||||
}
|
||||
}
|
||||
};
|
|
@ -8,7 +8,7 @@ module.exports = Self => {
|
|||
},
|
||||
http: {
|
||||
path: `/notifyIssues`,
|
||||
verb: 'POST'
|
||||
verb: 'GET'
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -32,7 +32,7 @@ module.exports = Self => {
|
|||
let message = $t(`There's a new urgent ticket:`);
|
||||
const ostUri = 'https://cau.verdnatura.es/scp/tickets.php?id=';
|
||||
tickets.forEach(ticket => {
|
||||
message += `\r\n[ID: ${ticket.number} - ${ticket.subject} @${ticket.username}](${ostUri + ticket.id})`;
|
||||
message += `\r\n[ID: *${ticket.number}* - ${ticket.subject} (@${ticket.username})](${ostUri + ticket.id})`;
|
||||
});
|
||||
|
||||
const department = await models.Department.findOne({
|
||||
|
@ -42,5 +42,7 @@ module.exports = Self => {
|
|||
|
||||
if (channelName)
|
||||
return Self.send(ctx, `#${channelName}`, `@all ➔ ${message}`);
|
||||
|
||||
return;
|
||||
};
|
||||
};
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
const got = require('got');
|
||||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('send', {
|
||||
description: 'Creates a direct message in the chat model for a user or a channel',
|
||||
description: 'Send a RocketChat message',
|
||||
accessType: 'WRITE',
|
||||
accepts: [{
|
||||
arg: 'to',
|
||||
|
@ -26,30 +27,125 @@ module.exports = Self => {
|
|||
Self.send = async(ctx, to, message) => {
|
||||
const models = Self.app.models;
|
||||
const accessToken = ctx.req.accessToken;
|
||||
const sender = await models.VnUser.findById(accessToken.userId);
|
||||
const sender = await models.Account.findById(accessToken.userId);
|
||||
const recipient = to.replace('@', '');
|
||||
|
||||
if (sender.name != recipient) {
|
||||
const chat = await models.Chat.create({
|
||||
senderFk: sender.id,
|
||||
recipient: to,
|
||||
dated: Date.vnNew(),
|
||||
checkUserStatus: 0,
|
||||
message: message,
|
||||
status: 'sending',
|
||||
attempts: 0
|
||||
});
|
||||
let {body} = await sendMessage(sender, to, message);
|
||||
if (body)
|
||||
body = JSON.parse(body);
|
||||
else
|
||||
body = false;
|
||||
|
||||
try {
|
||||
await Self.sendMessage(chat.senderFk, chat.recipient, chat.message);
|
||||
await Self.updateChat(chat, 'sent');
|
||||
} catch (error) {
|
||||
await Self.updateChat(chat, 'error', error);
|
||||
}
|
||||
|
||||
return true;
|
||||
return body;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
async function sendMessage(sender, channel, message) {
|
||||
const config = await getConfig();
|
||||
const avatar = `${config.host}/avatar/${sender.name}`;
|
||||
const uri = `${config.api}/chat.postMessage`;
|
||||
|
||||
return sendAuth(uri, {
|
||||
'channel': channel,
|
||||
'avatar': avatar,
|
||||
'alias': sender.nickname,
|
||||
'text': message
|
||||
}).catch(async error => {
|
||||
if (error.statusCode === 401) {
|
||||
this.auth = null;
|
||||
|
||||
return sendMessage(sender, channel, message);
|
||||
}
|
||||
|
||||
throw new Error(error.message);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a rocketchat token
|
||||
* @return {Object} userId and authToken
|
||||
*/
|
||||
async function getAuthToken() {
|
||||
if (!this.auth || this.auth && !this.auth.authToken) {
|
||||
const config = await getConfig();
|
||||
const uri = `${config.api}/login`;
|
||||
let {body} = await send(uri, {
|
||||
user: config.user,
|
||||
password: config.password
|
||||
});
|
||||
|
||||
if (body) {
|
||||
body = JSON.parse(body);
|
||||
this.auth = body.data;
|
||||
}
|
||||
}
|
||||
|
||||
return this.auth;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a rocketchat config
|
||||
* @return {Object} Auth config
|
||||
*/
|
||||
async function getConfig() {
|
||||
if (!this.chatConfig) {
|
||||
const models = Self.app.models;
|
||||
|
||||
this.chatConfig = await models.ChatConfig.findOne();
|
||||
}
|
||||
|
||||
return this.chatConfig;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send unauthenticated request
|
||||
* @param {*} uri - Request uri
|
||||
* @param {*} params - Request params
|
||||
* @param {*} options - Request options
|
||||
*
|
||||
* @return {Object} Request response
|
||||
*/
|
||||
async function send(uri, params, options = {}) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
return new Promise(resolve => {
|
||||
return resolve({
|
||||
body: JSON.stringify(
|
||||
{statusCode: 200, message: 'Fake notification sent'}
|
||||
)
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const defaultOptions = {
|
||||
body: params
|
||||
};
|
||||
|
||||
if (options) Object.assign(defaultOptions, options);
|
||||
|
||||
return got.post(uri, defaultOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send authenticated request
|
||||
* @param {*} uri - Request uri
|
||||
* @param {*} body - Request params
|
||||
*
|
||||
* @return {Object} Request response
|
||||
*/
|
||||
async function sendAuth(uri, body) {
|
||||
const login = await getAuthToken();
|
||||
const options = {
|
||||
headers: {}
|
||||
};
|
||||
|
||||
if (login) {
|
||||
options.headers['X-Auth-Token'] = login.authToken;
|
||||
options.headers['X-User-Id'] = login.userId;
|
||||
}
|
||||
|
||||
return send(uri, body, options);
|
||||
}
|
||||
};
|
||||
|
|
|
@ -1,23 +1,21 @@
|
|||
const isProduction = require('vn-loopback/server/boot/isProduction');
|
||||
|
||||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('sendCheckingPresence', {
|
||||
description: 'Creates a message in the chat model checking the user status',
|
||||
description: 'Sends a RocketChat message to a working worker or department channel',
|
||||
accessType: 'WRITE',
|
||||
accepts: [{
|
||||
arg: 'workerId',
|
||||
type: 'number',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The recipient user id'
|
||||
description: 'The worker id of the destinatary'
|
||||
},
|
||||
{
|
||||
arg: 'message',
|
||||
type: 'string',
|
||||
type: 'String',
|
||||
required: true,
|
||||
description: 'The message'
|
||||
}],
|
||||
returns: {
|
||||
type: 'object',
|
||||
type: 'Object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
|
@ -26,39 +24,39 @@ module.exports = Self => {
|
|||
}
|
||||
});
|
||||
|
||||
Self.sendCheckingPresence = async(ctx, recipientId, message) => {
|
||||
Self.sendCheckingPresence = async(ctx, recipientId, message, options) => {
|
||||
if (!recipientId) return false;
|
||||
|
||||
const myOptions = {};
|
||||
|
||||
if (typeof options == 'object')
|
||||
Object.assign(myOptions, options);
|
||||
|
||||
const models = Self.app.models;
|
||||
|
||||
const account = await models.Account.findById(recipientId, null, myOptions);
|
||||
const userId = ctx.req.accessToken.userId;
|
||||
const sender = await models.VnUser.findById(userId, {fields: ['id']});
|
||||
const recipient = await models.VnUser.findById(recipientId, null);
|
||||
|
||||
// Prevent sending messages to yourself
|
||||
if (recipientId == userId) return false;
|
||||
if (!recipient)
|
||||
|
||||
if (!account)
|
||||
throw new Error(`Could not send message "${message}" to worker id ${recipientId} from user ${userId}`);
|
||||
|
||||
if (!isProduction())
|
||||
message = `[Test:Environment to user ${userId}] ` + message;
|
||||
const query = `SELECT worker_isWorking(?) isWorking`;
|
||||
const [result] = await Self.rawSql(query, [recipientId], myOptions);
|
||||
|
||||
const chat = await models.Chat.create({
|
||||
senderFk: sender.id,
|
||||
recipient: `@${recipient.name}`,
|
||||
dated: Date.vnNew(),
|
||||
checkUserStatus: 1,
|
||||
message: message,
|
||||
status: 'sending',
|
||||
attempts: 0
|
||||
});
|
||||
if (!result.isWorking) {
|
||||
const workerDepartment = await models.WorkerDepartment.findById(recipientId, {
|
||||
include: {
|
||||
relation: 'department'
|
||||
}
|
||||
}, myOptions);
|
||||
const department = workerDepartment && workerDepartment.department();
|
||||
const channelName = department && department.chatName;
|
||||
|
||||
try {
|
||||
await Self.sendCheckingUserStatus(chat);
|
||||
await Self.updateChat(chat, 'sent');
|
||||
} catch (error) {
|
||||
await Self.updateChat(chat, 'error', error);
|
||||
if (channelName)
|
||||
return Self.send(ctx, `#${channelName}`, `@${account.name} ➔ ${message}`);
|
||||
}
|
||||
|
||||
return true;
|
||||
return Self.send(ctx, `@${account.name}`, message);
|
||||
};
|
||||
};
|
||||
|
|
|
@ -1,176 +0,0 @@
|
|||
const axios = require('axios');
|
||||
const isProduction = require('vn-loopback/server/boot/isProduction');
|
||||
|
||||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('sendQueued', {
|
||||
description: 'Send a RocketChat message',
|
||||
accessType: 'WRITE',
|
||||
returns: {
|
||||
type: 'object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/sendQueued`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.sendQueued = async() => {
|
||||
const models = Self.app.models;
|
||||
|
||||
const chats = await models.Chat.find({
|
||||
where: {
|
||||
status: {
|
||||
nin: [
|
||||
'sent',
|
||||
'sending'
|
||||
]
|
||||
|
||||
},
|
||||
attempts: {lt: 3}
|
||||
}
|
||||
});
|
||||
|
||||
for (let chat of chats) {
|
||||
if (chat.checkUserStatus) {
|
||||
try {
|
||||
await Self.sendCheckingUserStatus(chat);
|
||||
await Self.updateChat(chat, 'sent');
|
||||
} catch (error) {
|
||||
await Self.updateChat(chat, 'error', error);
|
||||
}
|
||||
} else {
|
||||
try {
|
||||
await Self.sendMessage(chat.senderFk, chat.recipient, chat.message);
|
||||
await Self.updateChat(chat, 'sent');
|
||||
} catch (error) {
|
||||
await Self.updateChat(chat, 'error', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Check user status in Rocket
|
||||
*
|
||||
* @param {object} chat - The sender id
|
||||
* @return {Promise} - The request promise
|
||||
*/
|
||||
Self.sendCheckingUserStatus = async function sendCheckingUserStatus(chat) {
|
||||
const models = Self.app.models;
|
||||
|
||||
const recipientName = chat.recipient.slice(1);
|
||||
const recipient = await models.VnUser.findOne({
|
||||
where: {
|
||||
name: recipientName
|
||||
}
|
||||
});
|
||||
|
||||
const {data} = await Self.getUserStatus(recipient.name);
|
||||
if (data) {
|
||||
if (data.status === 'offline' || data.status === 'busy') {
|
||||
// Send message to department room
|
||||
const workerDepartment = await models.WorkerDepartment.findById(recipient.id, {
|
||||
include: {
|
||||
relation: 'department'
|
||||
}
|
||||
});
|
||||
const department = workerDepartment && workerDepartment.department();
|
||||
const channelName = department && department.chatName;
|
||||
|
||||
if (channelName)
|
||||
return Self.sendMessage(chat.senderFk, `#${channelName}`, `@${recipient.name} ➔ ${chat.message}`);
|
||||
else
|
||||
return Self.sendMessage(chat.senderFk, `@${recipient.name}`, chat.message);
|
||||
} else
|
||||
return Self.sendMessage(chat.senderFk, `@${recipient.name}`, chat.message);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Send a rocket message
|
||||
*
|
||||
* @param {object} senderFk - The sender id
|
||||
* @param {string} recipient - The user (@) or channel (#) to send the message
|
||||
* @param {string} message - The message to send
|
||||
* @return {Promise} - The request promise
|
||||
*/
|
||||
Self.sendMessage = async function sendMessage(senderFk, recipient, message) {
|
||||
if (!isProduction(false)) {
|
||||
return new Promise(resolve => {
|
||||
return resolve({
|
||||
statusCode: 200,
|
||||
message: 'Fake notification sent'
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const models = Self.app.models;
|
||||
const sender = await models.VnUser.findById(senderFk);
|
||||
|
||||
const login = await Self.getServiceAuth();
|
||||
const avatar = `${login.host}/avatar/${sender.name}`;
|
||||
|
||||
const options = {
|
||||
headers: {
|
||||
'X-Auth-Token': login.auth.token,
|
||||
'X-User-Id': login.auth.userId
|
||||
},
|
||||
};
|
||||
|
||||
return axios.post(`${login.api}/chat.postMessage`, {
|
||||
'channel': recipient,
|
||||
'avatar': avatar,
|
||||
'alias': sender.nickname,
|
||||
'text': message
|
||||
}, options);
|
||||
};
|
||||
|
||||
/**
|
||||
* Update status and attempts of a chat
|
||||
*
|
||||
* @param {object} chat - The chat
|
||||
* @param {string} status - The new status
|
||||
* @param {string} error - The error
|
||||
* @param {object} options - Query options
|
||||
* @return {Promise} - The request promise
|
||||
*/
|
||||
|
||||
Self.updateChat = async(chat, status, error) => {
|
||||
return chat.updateAttributes({
|
||||
status: status,
|
||||
attempts: ++chat.attempts,
|
||||
error: error
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the current user status on Rocketchat
|
||||
*
|
||||
* @param {string} username - The recipient user name
|
||||
* @return {Promise} - The request promise
|
||||
*/
|
||||
Self.getUserStatus = async function getUserStatus(username) {
|
||||
if (!isProduction(false)) {
|
||||
return new Promise(resolve => {
|
||||
return resolve({
|
||||
data: {
|
||||
status: 'online'
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
const login = await Self.getServiceAuth();
|
||||
|
||||
const options = {
|
||||
params: {username},
|
||||
headers: {
|
||||
'X-Auth-Token': login.auth.token,
|
||||
'X-User-Id': login.auth.userId
|
||||
},
|
||||
};
|
||||
|
||||
return axios.get(`${login.api}/users.getStatus`, options);
|
||||
};
|
||||
};
|
|
@ -1,21 +1,22 @@
|
|||
const {models} = require('vn-loopback/server/server');
|
||||
const app = require('vn-loopback/server/server');
|
||||
|
||||
describe('Chat notifyIssue()', () => {
|
||||
const ctx = {req: {accessToken: {userId: 1}}};
|
||||
ctx.req.__ = value => {
|
||||
return value;
|
||||
};
|
||||
const chatModel = models.Chat;
|
||||
const osTicketModel = models.OsTicket;
|
||||
const chatModel = app.models.Chat;
|
||||
const osTicketModel = app.models.OsTicket;
|
||||
const departmentId = 31;
|
||||
|
||||
it(`should not call to the send() method and neither return a response`, async() => {
|
||||
spyOn(chatModel, 'send').and.callThrough();
|
||||
spyOn(osTicketModel, 'rawSql').and.returnValue([]);
|
||||
|
||||
await chatModel.notifyIssues(ctx);
|
||||
const response = await chatModel.notifyIssues(ctx);
|
||||
|
||||
expect(chatModel.send).not.toHaveBeenCalled();
|
||||
expect(response).toBeUndefined();
|
||||
});
|
||||
|
||||
it(`should return a response calling the send() method`, async() => {
|
||||
|
@ -26,15 +27,16 @@ describe('Chat notifyIssue()', () => {
|
|||
username: 'batman',
|
||||
subject: 'Issue title'}
|
||||
]);
|
||||
// eslint-disable-next-line max-len
|
||||
const expectedMessage = `@all ➔ There's a new urgent ticket:\r\n[ID: 00001 - Issue title @batman](https://cau.verdnatura.es/scp/tickets.php?id=1)`;
|
||||
const expectedMessage = `@all ➔ There's a new urgent ticket:\r\n[ID: *00001* - Issue title (@batman)](https://cau.verdnatura.es/scp/tickets.php?id=1)`;
|
||||
|
||||
const department = await models.Department.findById(departmentId);
|
||||
const department = await app.models.Department.findById(departmentId);
|
||||
let orgChatName = department.chatName;
|
||||
await department.updateAttribute('chatName', 'IT');
|
||||
|
||||
await chatModel.notifyIssues(ctx);
|
||||
const response = await chatModel.notifyIssues(ctx);
|
||||
|
||||
expect(response.statusCode).toEqual(200);
|
||||
expect(response.message).toEqual('Fake notification sent');
|
||||
expect(chatModel.send).toHaveBeenCalledWith(ctx, '#IT', expectedMessage);
|
||||
|
||||
// restores
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
const {models} = require('vn-loopback/server/server');
|
||||
const app = require('vn-loopback/server/server');
|
||||
|
||||
describe('Chat send()', () => {
|
||||
it('should return true as response', async() => {
|
||||
it('should return a "Fake notification sent" as response', async() => {
|
||||
let ctx = {req: {accessToken: {userId: 1}}};
|
||||
let response = await models.Chat.send(ctx, '@salesperson', 'I changed something');
|
||||
let response = await app.models.Chat.send(ctx, '@salesPerson', 'I changed something');
|
||||
|
||||
expect(response).toEqual(true);
|
||||
expect(response.statusCode).toEqual(200);
|
||||
expect(response.message).toEqual('Fake notification sent');
|
||||
});
|
||||
|
||||
it('should return false as response', async() => {
|
||||
it('should retrun false as response', async() => {
|
||||
let ctx = {req: {accessToken: {userId: 18}}};
|
||||
let response = await models.Chat.send(ctx, '@salesperson', 'I changed something');
|
||||
let response = await app.models.Chat.send(ctx, '@salesPerson', 'I changed something');
|
||||
|
||||
expect(response).toEqual(false);
|
||||
expect(response).toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,21 +1,46 @@
|
|||
const models = require('vn-loopback/server/server').models;
|
||||
const app = require('vn-loopback/server/server');
|
||||
|
||||
describe('Chat sendCheckingPresence()', () => {
|
||||
it('should return true as response', async() => {
|
||||
const workerId = 1107;
|
||||
const today = new Date();
|
||||
today.setHours(6, 0);
|
||||
const ctx = {req: {accessToken: {userId: 1}}};
|
||||
const chatModel = app.models.Chat;
|
||||
const departmentId = 23;
|
||||
const workerId = 1107;
|
||||
|
||||
let ctx = {req: {accessToken: {userId: 1}}};
|
||||
let response = await models.Chat.sendCheckingPresence(ctx, workerId, 'I changed something');
|
||||
it(`should call send() method with the worker name if he's currently working then return a response`, async() => {
|
||||
spyOn(chatModel, 'send').and.callThrough();
|
||||
|
||||
expect(response).toEqual(true);
|
||||
const timeEntry = await app.models.WorkerTimeControl.create({
|
||||
userFk: workerId,
|
||||
timed: today,
|
||||
manual: false,
|
||||
direction: 'in'
|
||||
});
|
||||
|
||||
const response = await chatModel.sendCheckingPresence(ctx, workerId, 'I changed something');
|
||||
|
||||
expect(response.statusCode).toEqual(200);
|
||||
expect(response.message).toEqual('Fake notification sent');
|
||||
expect(chatModel.send).toHaveBeenCalledWith(ctx, '@HankPym', 'I changed something');
|
||||
|
||||
// restores
|
||||
await app.models.WorkerTimeControl.destroyById(timeEntry.id);
|
||||
});
|
||||
|
||||
it('should return false as response', async() => {
|
||||
const salesPersonId = 18;
|
||||
it(`should call to send() method with the worker department channel if he's not currently working then return a response`, async() => {
|
||||
spyOn(chatModel, 'send').and.callThrough();
|
||||
|
||||
let ctx = {req: {accessToken: {userId: 18}}};
|
||||
let response = await models.Chat.sendCheckingPresence(ctx, salesPersonId, 'I changed something');
|
||||
const department = await app.models.Department.findById(departmentId);
|
||||
await department.updateAttribute('chatName', 'cooler');
|
||||
|
||||
expect(response).toEqual(false);
|
||||
const response = await chatModel.sendCheckingPresence(ctx, workerId, 'I changed something');
|
||||
|
||||
expect(response.statusCode).toEqual(200);
|
||||
expect(response.message).toEqual('Fake notification sent');
|
||||
expect(chatModel.send).toHaveBeenCalledWith(ctx, '#cooler', '@HankPym ➔ I changed something');
|
||||
|
||||
// restores
|
||||
await department.updateAttribute('chatName', null);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,41 +0,0 @@
|
|||
const models = require('vn-loopback/server/server').models;
|
||||
|
||||
describe('Chat sendCheckingPresence()', () => {
|
||||
const today = Date.vnNew();
|
||||
today.setHours(6, 0);
|
||||
const chatModel = models.Chat;
|
||||
|
||||
it(`should call to sendCheckingUserStatus()`, async() => {
|
||||
spyOn(chatModel, 'sendCheckingUserStatus').and.callThrough();
|
||||
|
||||
const chat = {
|
||||
checkUserStatus: 1,
|
||||
status: 'pending',
|
||||
attempts: 0
|
||||
};
|
||||
|
||||
await chatModel.destroyAll();
|
||||
await chatModel.create(chat);
|
||||
|
||||
await chatModel.sendQueued();
|
||||
|
||||
expect(chatModel.sendCheckingUserStatus).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it(`should call to sendMessage() method`, async() => {
|
||||
spyOn(chatModel, 'sendMessage').and.callThrough();
|
||||
|
||||
const chat = {
|
||||
checkUserStatus: 0,
|
||||
status: 'pending',
|
||||
attempts: 0
|
||||
};
|
||||
|
||||
await chatModel.destroyAll();
|
||||
await chatModel.create(chat);
|
||||
|
||||
await chatModel.sendQueued();
|
||||
|
||||
expect(chatModel.sendMessage).toHaveBeenCalled();
|
||||
});
|
||||
});
|
|
@ -1,37 +0,0 @@
const UserError = require('vn-loopback/util/user-error');
module.exports = Self => {
    Self.remoteMethodCtx('assign', {
        description: 'Assign a collection',
        accessType: 'WRITE',
        http: {
            path: `/assign`,
            verb: 'POST'
        },
        returns: {
            type: ['object'],
            root: true
        },
    });

    Self.assign = async(ctx, options) => {
        const userId = ctx.req.accessToken.userId;
        const myOptions = {userId};

        if (typeof options == 'object')
            Object.assign(myOptions, options);

        const randStr = Math.random().toString(36).substring(3);
        const result = await Self.rawSql(`
            CALL vn.collection_assign(?, @vCollectionFk);
            SELECT @vCollectionFk ?
        `, [userId, randStr], myOptions);

        // In case it enters SELECT FOR UPDATE one or more times
        const collectionFk = result.find(item => item[0]?.[randStr] !== undefined)?.[0]?.[randStr];

        if (!collectionFk) throw new UserError('There are not picking tickets');
        await Self.rawSql('CALL vn.collection_printSticker(?, NULL)', [collectionFk], myOptions);

        return collectionFk;
    };
};
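A small illustrative sketch (not from the repository) of the multi-result-set handling the removed method relied on, with a fabricated `rawSql` response shape: the `CALL` plus the trailing `SELECT` each contribute one result set, so the output variable has to be located by its random alias rather than taken by position.

```js
// Hypothetical shape of `result` as returned by rawSql for a multi-statement call.
const randStr = 'k3x9f';
const result = [
    [{fieldCount: 0}],   // result set of the CALL itself
    [{[randStr]: 42}]    // result set of `SELECT @vCollectionFk ?`
];

// Same lookup as in the removed assign() method.
const collectionFk = result.find(item => item[0]?.[randStr] !== undefined)?.[0]?.[randStr];
// collectionFk === 42
```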
@ -1,36 +0,0 @@
|
|||
const UserError = require('vn-loopback/util/user-error');
|
||||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('assignCollection', {
|
||||
description: 'Assign a collection',
|
||||
accessType: 'WRITE',
|
||||
http: {
|
||||
path: `/assignCollection`,
|
||||
verb: 'POST'
|
||||
},
|
||||
returns: {
|
||||
type: ['object'],
|
||||
root: true
|
||||
},
|
||||
});
|
||||
|
||||
Self.assignCollection = async(ctx, options) => {
|
||||
const userId = ctx.req.accessToken.userId;
|
||||
const myOptions = {userId};
|
||||
|
||||
if (typeof options == 'object')
|
||||
Object.assign(myOptions, options);
|
||||
|
||||
const randStr = Math.random().toString(36).substring(3);
|
||||
const result = await Self.rawSql(`
|
||||
CALL vn.collection_getAssigned(?, @vCollectionFk);
|
||||
SELECT @vCollectionFk ?
|
||||
`, [userId, randStr], myOptions);
|
||||
|
||||
const collectionFk = result.find(item => item[0]?.[randStr] !== undefined)?.[0]?.[randStr];
|
||||
|
||||
if (!collectionFk) throw new UserError('There are not picking tickets');
|
||||
await Self.rawSql('CALL vn.collection_printSticker(?, NULL)', [collectionFk], myOptions);
|
||||
|
||||
return collectionFk;
|
||||
};
|
||||
};
|
|
@ -0,0 +1,35 @@
|
|||
module.exports = Self => {
|
||||
Self.remoteMethod('collectionFaults', {
|
||||
description: 'Update sale of a collection',
|
||||
accessType: 'WRITE',
|
||||
accepts: [{
|
||||
arg: 'shelvingFk',
|
||||
type: 'String',
|
||||
required: true,
|
||||
description: 'The shalving id'
|
||||
}, {
|
||||
arg: 'quantity',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The quantity to sale'
|
||||
}, {
|
||||
arg: 'itemFk',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The ticket id'
|
||||
}],
|
||||
returns: {
|
||||
type: 'Object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/collectionFaults`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.collectionFaults = async(shelvingFk, quantity, itemFk) => {
|
||||
query = `CALL vn.collection_faults(?,?,?)`;
|
||||
return await Self.rawSql(query, [shelvingFk, quantity, itemFk]);
|
||||
};
|
||||
};
|
|
@ -1,142 +0,0 @@
|
|||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('getSales', {
|
||||
description: 'Get sales from ticket, collection or sectorCollection',
|
||||
accessType: 'READ',
|
||||
accepts: [
|
||||
{
|
||||
arg: 'collectionOrTicketFk',
|
||||
type: 'number',
|
||||
required: true
|
||||
}, {
|
||||
arg: 'print',
|
||||
type: 'boolean',
|
||||
required: true
|
||||
}, {
|
||||
arg: 'source',
|
||||
type: 'string',
|
||||
required: true
|
||||
},
|
||||
|
||||
],
|
||||
returns: {
|
||||
type: 'Object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/getSales`,
|
||||
verb: 'GET'
|
||||
},
|
||||
});
|
||||
|
||||
Self.getSales = async(ctx, collectionOrTicketFk, print, source, options) => {
|
||||
const userId = ctx.req.accessToken.userId;
|
||||
const myOptions = {userId};
|
||||
|
||||
if (typeof options == 'object')
|
||||
Object.assign(myOptions, options);
|
||||
|
||||
const [{id}] = await Self.rawSql('SELECT vn.ticket_get(?) as id',
|
||||
[collectionOrTicketFk],
|
||||
myOptions);
|
||||
|
||||
const [tickets] = await Self.rawSql('CALL vn.collection_getTickets(?)', [id], myOptions);
|
||||
|
||||
if (source) {
|
||||
await Self.rawSql(
|
||||
'CALL vn.ticketStateToday_setState(?,?)', [id, source], myOptions
|
||||
);
|
||||
}
|
||||
|
||||
const [sales] = await Self.rawSql('CALL vn.sale_getFromTicketOrCollection(?)',
|
||||
[id], myOptions);
|
||||
|
||||
const isPicker = source != 'CHECKER';
|
||||
const [placements] = await Self.rawSql('CALL vn.collectionPlacement_get(?, ?)',
|
||||
[id, isPicker], myOptions
|
||||
);
|
||||
|
||||
if (print) await Self.rawSql('CALL vn.collection_printSticker(?,NULL)', [id], myOptions);
|
||||
|
||||
return getCollection(id, tickets, sales, placements, myOptions);
|
||||
};
|
||||
|
||||
async function getCollection(id, tickets, sales, placements, options) {
|
||||
const collection = {
|
||||
collectionFk: id,
|
||||
tickets: [],
|
||||
};
|
||||
for (let ticket of tickets) {
|
||||
const {ticketFk} = ticket;
|
||||
ticket.sales = [];
|
||||
|
||||
const barcodes = await getBarcodes(ticketFk, options);
|
||||
await Self.rawSql(
|
||||
'CALL util.log_add(?, ?, ?, ?, ?, ?, ?, ?)',
|
||||
['vn', 'ticket', 'Ticket', ticketFk, ticketFk, 'select', null, null],
|
||||
options
|
||||
);
|
||||
|
||||
for (let sale of sales) {
|
||||
if (sale.ticketFk == ticketFk) {
|
||||
sale.placements = [];
|
||||
for (const salePlacement of placements) {
|
||||
if (salePlacement.saleFk == sale.saleFk && salePlacement.order) {
|
||||
const placement = {
|
||||
saleFk: salePlacement.saleFk,
|
||||
itemFk: salePlacement.itemFk,
|
||||
placement: salePlacement.placement,
|
||||
shelving: salePlacement.shelving,
|
||||
created: salePlacement.created,
|
||||
visible: salePlacement.visible,
|
||||
order: salePlacement.order,
|
||||
grouping: salePlacement.grouping,
|
||||
priority: salePlacement.priority,
|
||||
saleOrder: salePlacement.saleOrder,
|
||||
isPreviousPrepared: salePlacement.isPreviousPrepared,
|
||||
itemShelvingSaleFk: salePlacement.itemShelvingSaleFk,
|
||||
ticketFk: salePlacement.ticketFk,
|
||||
id: salePlacement.id
|
||||
};
|
||||
sale.placements.push(placement);
|
||||
}
|
||||
}
|
||||
|
||||
sale.barcodes = [];
|
||||
for (const barcode of barcodes) {
|
||||
if (barcode.movementId == sale.saleFk) {
|
||||
if (barcode.code) {
|
||||
sale.barcodes.push(barcode.code);
|
||||
sale.barcodes.push(`0 ${barcode.code}`);
|
||||
}
|
||||
|
||||
if (barcode.id) {
|
||||
sale.barcodes.push(barcode.id);
|
||||
sale.barcodes.push(`0 ${barcode.id}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ticket.sales.push(sale);
|
||||
}
|
||||
}
|
||||
collection.tickets.push(ticket);
|
||||
}
|
||||
|
||||
return collection;
|
||||
}
|
||||
|
||||
async function getBarcodes(ticketId, options) {
|
||||
const query =
|
||||
`SELECT s.id movementId,
|
||||
b.code,
|
||||
c.id
|
||||
FROM vn.sale s
|
||||
LEFT JOIN vn.itemBarcode b ON b.itemFk = s.itemFk
|
||||
LEFT JOIN vn.buy c ON c.itemFk = s.itemFk
|
||||
LEFT JOIN vn.entry e ON e.id = c.entryFk
|
||||
LEFT JOIN vn.travel tr ON tr.id = e.travelFk
|
||||
WHERE s.ticketFk = ?
|
||||
AND tr.landed >= DATE_SUB(CURDATE(), INTERVAL 1 YEAR)`;
|
||||
return Self.rawSql(query, [ticketId], options);
|
||||
}
|
||||
};
|
|
@ -0,0 +1,20 @@
|
|||
module.exports = Self => {
|
||||
Self.remoteMethod('getSectors', {
|
||||
description: 'Get all sectors',
|
||||
accessType: 'READ',
|
||||
returns: {
|
||||
type: 'Object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/getSectors`,
|
||||
verb: 'GET'
|
||||
}
|
||||
});
|
||||
|
||||
Self.getSectors = async() => {
|
||||
const query = `CALL vn.sector_get()`;
|
||||
const [result] = await Self.rawSql(query);
|
||||
return result;
|
||||
};
|
||||
};
|
|
@ -1,179 +0,0 @@
|
|||
|
||||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('getTickets', {
|
||||
description: 'Make a new collection of tickets',
|
||||
accessType: 'WRITE',
|
||||
accepts: [{
|
||||
arg: 'id',
|
||||
type: 'number',
|
||||
description: 'The collection id',
|
||||
required: true,
|
||||
http: {source: 'path'}
|
||||
}, {
|
||||
arg: 'print',
|
||||
type: 'boolean',
|
||||
description: 'True if you want to print'
|
||||
}],
|
||||
returns: {
|
||||
type: ['object'],
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/:id/getTickets`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.getTickets = async(ctx, id, print, options) => {
|
||||
const userId = ctx.req.accessToken.userId;
|
||||
const url = await Self.app.models.Url.getUrl();
|
||||
const $t = ctx.req.__;
|
||||
const myOptions = {};
|
||||
|
||||
if (typeof options == 'object')
|
||||
Object.assign(myOptions, options);
|
||||
|
||||
myOptions.userId = userId;
|
||||
|
||||
const promises = [];
|
||||
const [tickets] = await Self.rawSql(`CALL vn.collection_getTickets(?)`, [id], myOptions);
|
||||
|
||||
const sales = await Self.rawSql(`
|
||||
SELECT s.ticketFk,
|
||||
sgd.saleGroupFk,
|
||||
s.id saleFk,
|
||||
s.itemFk,
|
||||
i.longName,
|
||||
i.size,
|
||||
ic.color,
|
||||
o.code origin,
|
||||
ish.packing,
|
||||
ish.grouping,
|
||||
s.isAdded,
|
||||
s.originalQuantity,
|
||||
s.quantity saleQuantity,
|
||||
iss.quantity reservedQuantity,
|
||||
SUM(iss.quantity) OVER (PARTITION BY s.id ORDER BY ish.id) accumulatedQuantity,
|
||||
ROW_NUMBER () OVER (PARTITION BY s.id ORDER BY pickingOrder) currentItemShelving,
|
||||
COUNT(*) OVER (PARTITION BY s.id ORDER BY s.id) totalItemShelving,
|
||||
sh.code,
|
||||
p2.code parkingCodePrevia,
|
||||
p2.pickingOrder pickingOrderPrevia,
|
||||
p.code parkingCode,
|
||||
p.pickingOrder pickingOrder,
|
||||
iss.id itemShelvingSaleFk,
|
||||
iss.isPicked,
|
||||
iss.itemShelvingFk
|
||||
FROM ticketCollection tc
|
||||
LEFT JOIN collection c ON c.id = tc.collectionFk
|
||||
JOIN sale s ON s.ticketFk = tc.ticketFk
|
||||
LEFT JOIN saleGroupDetail sgd ON sgd.saleFk = s.id
|
||||
LEFT JOIN saleGroup sg ON sg.id = sgd.saleGroupFk
|
||||
LEFT JOIN parking p2 ON p2.id = sg.parkingFk
|
||||
JOIN item i ON i.id = s.itemFk
|
||||
JOIN itemShelvingSale iss ON iss.saleFk = s.id
|
||||
LEFT JOIN itemShelving ish ON ish.id = iss.itemShelvingFk
|
||||
LEFT JOIN shelving sh ON sh.id = ish.shelvingFk
|
||||
LEFT JOIN parking p ON p.id = sh.parkingFk
|
||||
LEFT JOIN itemColor ic ON ic.itemFk = s.itemFk
|
||||
LEFT JOIN origin o ON o.id = i.originFk
|
||||
WHERE tc.collectionFk = ?
|
||||
GROUP BY s.id, ish.id, p.code, p2.code
|
||||
UNION ALL
|
||||
SELECT s.ticketFk,
|
||||
sgd.saleGroupFk,
|
||||
s.id saleFk,
|
||||
s.itemFk,
|
||||
i.longName,
|
||||
i.size,
|
||||
ic.color,
|
||||
o.code origin,
|
||||
ish.packing,
|
||||
ish.grouping,
|
||||
s.isAdded,
|
||||
s.originalQuantity,
|
||||
s.quantity,
|
||||
iss.quantity,
|
||||
SUM(iss.quantity) OVER (PARTITION BY s.id ORDER BY ish.id),
|
||||
ROW_NUMBER () OVER (PARTITION BY s.id ORDER BY p.pickingOrder),
|
||||
COUNT(*) OVER (PARTITION BY s.id ORDER BY s.id) ,
|
||||
sh.code,
|
||||
p2.code,
|
||||
p2.pickingOrder,
|
||||
p.code,
|
||||
p.pickingOrder,
|
||||
iss.id itemShelvingSaleFk,
|
||||
iss.isPicked,
|
||||
iss.itemShelvingFk
|
||||
FROM sectorCollection sc
|
||||
JOIN sectorCollectionSaleGroup ss ON ss.sectorCollectionFk = sc.id
|
||||
JOIN saleGroup sg ON sg.id = ss.saleGroupFk
|
||||
LEFT JOIN saleGroupDetail sgd ON sgd.saleGroupFk = sg.id
|
||||
JOIN sale s ON s.id = sgd.saleFk
|
||||
LEFT JOIN parking p2 ON p2.id = sg.parkingFk
|
||||
JOIN item i ON i.id = s.itemFk
|
||||
JOIN itemShelvingSale iss ON iss.saleFk = s.id
|
||||
LEFT JOIN itemShelving ish ON ish.id = iss.itemShelvingFk
|
||||
LEFT JOIN shelving sh ON sh.id = ish.shelvingFk
|
||||
LEFT JOIN parking p ON p.id = sh.parkingFk
|
||||
LEFT JOIN itemColor ic ON ic.itemFk = s.itemFk
|
||||
LEFT JOIN origin o ON o.id = i.originFk
|
||||
WHERE sc.id = ?
|
||||
AND sgd.saleGroupFk
|
||||
GROUP BY s.id, ish.id, p.code, p2.code`, [id, id], myOptions);
|
||||
if (print)
|
||||
await Self.rawSql(`CALL vn.collection_printSticker(?, ?)`, [id, null], myOptions);
|
||||
|
||||
const collection = {collectionFk: id, tickets: []};
|
||||
|
||||
if (tickets && tickets.length) {
|
||||
for (const ticket of tickets) {
|
||||
const ticketId = ticket.ticketFk;
|
||||
if (ticket.observation) {
|
||||
for (observation of ticket.observation?.split(' ')) {
|
||||
if (['#', '@'].includes(observation.charAt(0))) {
|
||||
promises.push(Self.app.models.Chat.send(ctx, observation,
|
||||
$t('The ticket is in preparation', {
|
||||
ticketId: ticketId,
|
||||
ticketUrl: `${url}ticket/${ticketId}/summary`,
|
||||
salesPersonId: ticket.salesPersonFk
|
||||
})));
|
||||
}
|
||||
}
|
||||
}
|
||||
if (sales && sales.length) {
|
||||
const barcodes = await Self.rawSql(`
|
||||
SELECT s.id saleFk, b.code, c.id
|
||||
FROM sale s
|
||||
LEFT JOIN itemBarcode b ON b.itemFk = s.itemFk
|
||||
LEFT JOIN buy c ON c.itemFk = s.itemFk
|
||||
LEFT JOIN entry e ON e.id = c.entryFk
|
||||
LEFT JOIN travel tr ON tr.id = e.travelFk
|
||||
WHERE s.ticketFk = ?
|
||||
AND tr.landed >= util.VN_CURDATE() - INTERVAL 1 YEAR`,
|
||||
[ticketId], myOptions);
|
||||
ticket.sales = [];
|
||||
for (const sale of sales) {
|
||||
if (sale.ticketFk === ticketId) {
|
||||
sale.Barcodes = [];
|
||||
if (barcodes && barcodes.length) {
|
||||
for (const barcode of barcodes) {
|
||||
if (barcode.saleFk === sale.saleFk) {
|
||||
for (const prop in barcode) {
|
||||
if (['id', 'code'].includes(prop) && barcode[prop])
|
||||
sale.Barcodes.push(barcode[prop].toString(), '0' + barcode[prop]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ticket.sales.push(sale);
|
||||
}
|
||||
}
|
||||
}
|
||||
collection.tickets.push(ticket);
|
||||
}
|
||||
}
|
||||
await Promise.all(promises);
|
||||
return collection;
|
||||
};
|
||||
};
|
|
@ -0,0 +1,133 @@
|
|||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('newCollection', {
|
||||
description: 'Make a new collection of tickets',
|
||||
accessType: 'WRITE',
|
||||
accepts: [{
|
||||
arg: 'collectionFk',
|
||||
type: 'Number',
|
||||
required: false,
|
||||
description: 'The collection id'
|
||||
}, {
|
||||
arg: 'sectorFk',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The sector of worker'
|
||||
}, {
|
||||
arg: 'vWagons',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The number of wagons'
|
||||
}],
|
||||
returns: {
|
||||
type: 'Object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/newCollection`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.newCollection = async(ctx, collectionFk, sectorFk, vWagons) => {
|
||||
let query = '';
|
||||
|
||||
if (!collectionFk) {
|
||||
const userId = ctx.req.accessToken.userId;
|
||||
query = `CALL vn.collectionTrain_newBeta(?,?,?)`;
|
||||
const [result] = await Self.rawSql(query, [sectorFk, vWagons, userId]);
|
||||
if (result.length == 0)
|
||||
throw new Error(`No collections for today`);
|
||||
|
||||
collectionFk = result[0].vCollectionFk;
|
||||
}
|
||||
|
||||
query = `CALL vn.collectionTicket_get(?)`;
|
||||
const [tickets] = await Self.rawSql(query, [collectionFk]);
|
||||
|
||||
query = `CALL vn.collectionSale_get(?)`;
|
||||
const [sales] = await Self.rawSql(query, [collectionFk]);
|
||||
|
||||
query = `CALL vn.collectionPlacement_get(?)`;
|
||||
const [placements] = await Self.rawSql(query, [collectionFk]);
|
||||
|
||||
query = `CALL vn.collectionSticker_print(?,?)`;
|
||||
await Self.rawSql(query, [collectionFk, sectorFk]);
|
||||
|
||||
return makeCollection(tickets, sales, placements, collectionFk);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns a collection json
|
||||
* @param {*} tickets - Request tickets
|
||||
* @param {*} sales - Request sales
|
||||
* @param {*} placements - Request placements
|
||||
* @param {*} collectionFk - Request placements
|
||||
* @return {Object} Collection JSON
|
||||
*/
|
||||
async function makeCollection(tickets, sales, placements, collectionFk) {
|
||||
let collection = [];
|
||||
|
||||
for (let i = 0; i < tickets.length; i++) {
|
||||
let ticket = {};
|
||||
ticket['ticketFk'] = tickets[i]['ticketFk'];
|
||||
ticket['level'] = tickets[i]['level'];
|
||||
ticket['agencyName'] = tickets[i]['agencyName'];
|
||||
ticket['warehouseFk'] = tickets[i]['warehouseFk'];
|
||||
ticket['salesPersonFk'] = tickets[i]['salesPersonFk'];
|
||||
|
||||
let ticketSales = [];
|
||||
|
||||
for (let x = 0; x < sales.length; x++) {
|
||||
if (sales[x]['ticketFk'] == ticket['ticketFk']) {
|
||||
let sale = {};
|
||||
sale['collectionFk'] = collectionFk;
|
||||
sale['ticketFk'] = sales[x]['ticketFk'];
|
||||
sale['saleFk'] = sales[x]['saleFk'];
|
||||
sale['itemFk'] = sales[x]['itemFk'];
|
||||
sale['quantity'] = sales[x]['quantity'];
|
||||
if (sales[x]['quantityPicked'] != null)
|
||||
sale['quantityPicked'] = sales[x]['quantityPicked'];
|
||||
else
|
||||
sale['quantityPicked'] = 0;
|
||||
sale['longName'] = sales[x]['longName'];
|
||||
sale['size'] = sales[x]['size'];
|
||||
sale['color'] = sales[x]['color'];
|
||||
sale['discount'] = sales[x]['discount'];
|
||||
sale['price'] = sales[x]['price'];
|
||||
sale['stems'] = sales[x]['stems'];
|
||||
sale['category'] = sales[x]['category'];
|
||||
sale['origin'] = sales[x]['origin'];
|
||||
sale['clientFk'] = sales[x]['clientFk'];
|
||||
sale['productor'] = sales[x]['productor'];
|
||||
sale['reserved'] = sales[x]['reserved'];
|
||||
sale['isPreviousPrepared'] = sales[x]['isPreviousPrepared'];
|
||||
sale['isPrepared'] = sales[x]['isPrepared'];
|
||||
sale['isControlled'] = sales[x]['isControlled'];
|
||||
|
||||
let salePlacements = [];
|
||||
|
||||
for (let z = 0; z < placements.length; z++) {
|
||||
if (placements[z]['saleFk'] == sale['saleFk']) {
|
||||
let placement = {};
|
||||
placement['saleFk'] = placements[z]['saleFk'];
|
||||
placement['itemFk'] = placements[z]['itemFk'];
|
||||
placement['placement'] = placements[z]['placement'];
|
||||
placement['shelving'] = placements[z]['shelving'];
|
||||
placement['created'] = placements[z]['created'];
|
||||
placement['visible'] = placements[z]['visible'];
|
||||
placement['order'] = placements[z]['order'];
|
||||
placement['grouping'] = placements[z]['grouping'];
|
||||
salePlacements.push(placement);
|
||||
}
|
||||
}
|
||||
sale['placements'] = salePlacements;
|
||||
ticketSales.push(sale);
|
||||
}
|
||||
}
|
||||
ticket['sales'] = ticketSales;
|
||||
collection.push(ticket);
|
||||
}
|
||||
|
||||
return collection;
|
||||
}
|
||||
};
|
|
@ -1,35 +0,0 @@
|
|||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('previousLabel', {
|
||||
description: 'Returns the previa label pdf',
|
||||
accessType: 'READ',
|
||||
accepts: [
|
||||
{
|
||||
arg: 'id',
|
||||
type: 'number',
|
||||
required: true,
|
||||
description: 'The item id',
|
||||
http: {source: 'path'}
|
||||
}],
|
||||
returns: [
|
||||
{
|
||||
arg: 'body',
|
||||
type: 'file',
|
||||
root: true
|
||||
}, {
|
||||
arg: 'Content-Type',
|
||||
type: 'String',
|
||||
http: {target: 'header'}
|
||||
}, {
|
||||
arg: 'Content-Disposition',
|
||||
type: 'String',
|
||||
http: {target: 'header'}
|
||||
}
|
||||
],
|
||||
http: {
|
||||
path: '/:id/previousLabel',
|
||||
verb: 'GET'
|
||||
}
|
||||
});
|
||||
|
||||
Self.previousLabel = (ctx, id) => Self.printReport(ctx, id, 'previa-label');
|
||||
};
|
|
@ -1,54 +0,0 @@
|
|||
module.exports = Self => {
|
||||
Self.remoteMethod('setSaleQuantity', {
|
||||
description: 'Update sale quantity',
|
||||
accessType: 'WRITE',
|
||||
accepts: [{
|
||||
arg: 'saleId',
|
||||
type: 'number',
|
||||
required: true,
|
||||
description: 'The sale id'
|
||||
},
|
||||
{
|
||||
arg: 'quantity',
|
||||
type: 'number',
|
||||
required: true,
|
||||
description: 'The quantity to picked'
|
||||
}],
|
||||
returns: {
|
||||
type: 'object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/setSaleQuantity`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.setSaleQuantity = async(saleId, quantity, options) => {
|
||||
const models = Self.app.models;
|
||||
const myOptions = {};
|
||||
let tx;
|
||||
|
||||
if (typeof options == 'object')
|
||||
Object.assign(myOptions, options);
|
||||
|
||||
if (!myOptions.transaction) {
|
||||
tx = await Self.beginTransaction({});
|
||||
myOptions.transaction = tx;
|
||||
}
|
||||
|
||||
try {
|
||||
const sale = await models.Sale.findById(saleId, null, myOptions);
|
||||
const saleUpdated = await sale.updateAttributes({
|
||||
quantity
|
||||
}, myOptions);
|
||||
|
||||
if (tx) await tx.commit();
|
||||
|
||||
return saleUpdated;
|
||||
} catch (e) {
|
||||
if (tx) await tx.rollback();
|
||||
throw e;
|
||||
}
|
||||
};
|
||||
};
|
|
@ -1,39 +0,0 @@
|
|||
const models = require('vn-loopback/server/server').models;
|
||||
const LoopBackContext = require('loopback-context');
|
||||
|
||||
describe('ticket assign()', () => {
|
||||
let ctx;
|
||||
let options;
|
||||
let tx;
|
||||
beforeEach(async() => {
|
||||
ctx = {
|
||||
req: {
|
||||
accessToken: {userId: 1106},
|
||||
headers: {origin: 'http://localhost'},
|
||||
__: value => value
|
||||
},
|
||||
args: {}
|
||||
};
|
||||
|
||||
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
|
||||
active: ctx.req
|
||||
});
|
||||
|
||||
options = {transaction: tx};
|
||||
tx = await models.Sale.beginTransaction({});
|
||||
options.transaction = tx;
|
||||
});
|
||||
|
||||
afterEach(async() => {
|
||||
await tx.rollback();
|
||||
});
|
||||
|
||||
it('should throw an error when there are no picking tickets', async() => {
|
||||
try {
|
||||
await models.Collection.assign(ctx, options);
|
||||
fail('Expected an error to be thrown, but none was thrown.');
|
||||
} catch (e) {
|
||||
expect(e.message).toEqual('There are not picking tickets');
|
||||
}
|
||||
});
|
||||
});
|
|
@ -1,36 +0,0 @@
|
|||
const models = require('vn-loopback/server/server').models;
|
||||
const LoopBackContext = require('loopback-context');
|
||||
|
||||
describe('ticket assignCollection()', () => {
|
||||
let ctx;
|
||||
let options;
|
||||
let tx;
|
||||
beforeEach(async() => {
|
||||
ctx = {
|
||||
req: {
|
||||
accessToken: {userId: 1106},
|
||||
headers: {origin: 'http://localhost'},
|
||||
__: value => value
|
||||
},
|
||||
args: {}
|
||||
};
|
||||
|
||||
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({active: ctx.req});
|
||||
|
||||
options = {transaction: tx};
|
||||
tx = await models.Sale.beginTransaction({});
|
||||
options.transaction = tx;
|
||||
});
|
||||
|
||||
afterEach(async() => {
|
||||
if (tx) await tx.rollback();
|
||||
});
|
||||
|
||||
it('should throw an error when there is not picking tickets', async() => {
|
||||
try {
|
||||
await models.Collection.assignCollection(ctx, options);
|
||||
} catch (e) {
|
||||
expect(e.message).toEqual('There are not picking tickets');
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,9 @@
const app = require('vn-loopback/server/server');
describe('collectionFaults()', () => {
    it('return shelving afected', async() => {
        let response = await app.models.Collection.collectionFaults('UXN', 0, 1);

        expect(response.length).toBeGreaterThan(0);
        expect(response[0][0].shelvingFk).toEqual('UXN');
    });
});
@ -2,10 +2,10 @@ const models = require('vn-loopback/server/server').models;
|
|||
|
||||
describe('ticket getCollection()', () => {
|
||||
it('should return a list of collections', async() => {
|
||||
let ctx = {req: {accessToken: {userId: 1107}}};
|
||||
let ctx = {req: {accessToken: {userId: 1106}}};
|
||||
let response = await models.Collection.getCollection(ctx);
|
||||
|
||||
expect(response.length).toBeGreaterThan(0);
|
||||
expect(response[0].collectionFk).toEqual(3);
|
||||
expect(response[0].collectionFk).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,54 +0,0 @@
|
|||
const {models} = require('vn-loopback/server/server');
|
||||
|
||||
describe('collection getSales()', () => {
|
||||
const collectionOrTicketFk = 999999;
|
||||
const print = true;
|
||||
const source = 'CHECKER';
|
||||
const ctx = beforeAll.getCtx();
|
||||
|
||||
it('should return a collection with tickets, placements and barcodes settled correctly', async() => {
|
||||
const tx = await models.Collection.beginTransaction({});
|
||||
const options = {transaction: tx};
|
||||
try {
|
||||
const collection = await models.Collection.getSales(ctx,
|
||||
collectionOrTicketFk, print, source, options);
|
||||
|
||||
const [firstTicket] = collection.tickets;
|
||||
const [firstSale] = firstTicket.sales;
|
||||
const [firstPlacement] = firstSale.placements;
|
||||
|
||||
expect(collection.tickets.length).toBeTruthy();
|
||||
expect(collection.collectionFk).toEqual(firstTicket.ticketFk);
|
||||
|
||||
expect(firstSale.ticketFk).toEqual(firstTicket.ticketFk);
|
||||
expect(firstSale.placements.length).toBeTruthy();
|
||||
expect(firstSale.barcodes.length).toBeTruthy();
|
||||
|
||||
expect(firstSale.saleFk).toEqual(firstPlacement.saleFk);
|
||||
await tx.rollback();
|
||||
} catch (e) {
|
||||
await tx.rollback();
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
|
||||
it('should print a sticker', async() => {
|
||||
const tx = await models.Collection.beginTransaction({});
|
||||
const options = {transaction: tx};
|
||||
const query = 'SELECT * FROM printQueue pq JOIN printQueueArgs pqa ON pqa.printQueueFk = pq.id';
|
||||
try {
|
||||
const printQueueBefore = await models.Collection.rawSql(
|
||||
query, [], options);
|
||||
await models.Collection.getSales(ctx,
|
||||
collectionOrTicketFk, true, source, options);
|
||||
const printQueueAfter = await models.Collection.rawSql(
|
||||
query, [], options);
|
||||
|
||||
expect(printQueueAfter.length).toEqual(printQueueBefore.length + 1);
|
||||
await tx.rollback();
|
||||
} catch (e) {
|
||||
await tx.rollback();
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,11 @@
const app = require('vn-loopback/server/server');

describe('getSectors()', () => {
    it('returns a list of sectors', async() => {
        let response = await app.models.Collection.getSectors();

        expect(response.length).toBeGreaterThan(0);
        expect(response[0].id).toEqual(1);
        expect(response[0].description).toEqual('First sector');
    });
});
|
|
@ -1,31 +0,0 @@
|
|||
const models = require('vn-loopback/server/server').models;
|
||||
|
||||
describe('collection getTickets()', () => {
|
||||
const ctx = beforeAll.getCtx();
|
||||
|
||||
it('should get tickets, sales and barcodes from collection', async() => {
|
||||
const tx = await models.Collection.beginTransaction({});
|
||||
|
||||
try {
|
||||
const options = {transaction: tx};
|
||||
const collectionId = 1;
|
||||
|
||||
const collectionTickets = await models.Collection.getTickets(ctx, collectionId, null, options);
|
||||
|
||||
expect(collectionTickets.collectionFk).toEqual(collectionId);
|
||||
expect(collectionTickets.tickets.length).toEqual(3);
|
||||
expect(collectionTickets.tickets[0].ticketFk).toEqual(1);
|
||||
expect(collectionTickets.tickets[1].ticketFk).toEqual(2);
|
||||
expect(collectionTickets.tickets[2].ticketFk).toEqual(23);
|
||||
expect(collectionTickets.tickets[0].sales[0].ticketFk).toEqual(1);
|
||||
expect(collectionTickets.tickets[1].sales.length).toEqual(0);
|
||||
expect(collectionTickets.tickets[2].sales.length).toEqual(0);
|
||||
expect(collectionTickets.tickets[0].sales[0].Barcodes.length).toBeTruthy();
|
||||
|
||||
await tx.rollback();
|
||||
} catch (e) {
|
||||
await tx.rollback();
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,12 @@
const app = require('vn-loopback/server/server');

// #3400 decide what to do with the collection back-end routes
xdescribe('newCollection()', () => {
    it('returns a new collection', async() => {
        let ctx = {req: {accessToken: {userId: 1106}}};
        let response = await app.models.Collection.newCollection(ctx, 1, 1, 1);

        expect(response.length).toBeGreaterThan(0);
        expect(response[0].ticketFk).toEqual(2);
    });
});
|
|
@ -1,37 +0,0 @@
|
|||
const models = require('vn-loopback/server/server').models;
|
||||
|
||||
describe('setSaleQuantity()', () => {
|
||||
beforeAll.mockLoopBackContext();
|
||||
|
||||
it('should change quantity sale', async() => {
|
||||
const tx = await models.Ticket.beginTransaction({});
|
||||
spyOn(models.Sale, 'rawSql').and.callFake((sqlStatement, params, options) => {
|
||||
if (sqlStatement.includes('catalog_calcFromItem')) {
|
||||
sqlStatement = `CREATE OR REPLACE TEMPORARY TABLE tmp.ticketCalculateItem ENGINE = MEMORY
|
||||
SELECT 100 as available;`;
|
||||
params = null;
|
||||
}
|
||||
return models.Ticket.rawSql(sqlStatement, params, options);
|
||||
});
|
||||
|
||||
try {
|
||||
const options = {transaction: tx};
|
||||
|
||||
const saleId = 30;
|
||||
const newQuantity = 10;
|
||||
|
||||
const originalSale = await models.Sale.findById(saleId, null, options);
|
||||
|
||||
await models.Collection.setSaleQuantity(saleId, newQuantity, options);
|
||||
const updateSale = await models.Sale.findById(saleId, null, options);
|
||||
|
||||
expect(updateSale.quantity).not.toEqual(originalSale.quantity);
|
||||
expect(updateSale.quantity).toEqual(newQuantity);
|
||||
|
||||
await tx.rollback();
|
||||
} catch (e) {
|
||||
await tx.rollback();
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
});
|
|
@ -0,0 +1,62 @@
|
|||
const app = require('vn-loopback/server/server');
|
||||
|
||||
describe('updateCollectionSale()', () => {
|
||||
it('should return a new collection', async() => {
|
||||
const sectorOneWarehouseID = 1;
|
||||
let ctx = {req: {accessToken: {userId: 1106}}};
|
||||
ctx.args = {
|
||||
sale: 1,
|
||||
originalQuantity: 5,
|
||||
quantity: 5,
|
||||
quantityPicked: 5,
|
||||
ticketFk: 1,
|
||||
stateFk: 4,
|
||||
isNicho: false,
|
||||
shelvingFk: 'UXN',
|
||||
itemFk: 1,
|
||||
sectorFk: 1
|
||||
};
|
||||
let originalSaleTracking = await app.models.SaleTracking.findOne({
|
||||
where: {
|
||||
saleFk: ctx.args.sale,
|
||||
stateFk: ctx.args.stateFk
|
||||
}
|
||||
});
|
||||
let itemPlacement = await app.models.ItemPlacement.findOne({
|
||||
where: {
|
||||
itemFk: ctx.args.itemFk,
|
||||
warehouseFk: sectorOneWarehouseID
|
||||
}
|
||||
});
|
||||
const originalSale = await app.models.Sale.findById(ctx.args.sale);
|
||||
const originalItemShelving = await app.models.ItemShelving.findOne({where: {shelvingFk: ctx.args.shelvingFk, itemFk: ctx.args.itemFk}});
|
||||
const originalTicketLastState = await app.models.TicketLastState.findById(ctx.args.ticketFk);
|
||||
|
||||
let response = await app.models.Collection.updateCollectionSale(ctx);
|
||||
|
||||
expect(response.length).toBeGreaterThan(0);
|
||||
expect(response[0][0].id).toEqual(1);
|
||||
expect(response[0][0].quantity).toEqual(5);
|
||||
|
||||
// restores
|
||||
if (originalSaleTracking)
|
||||
await originalSaleTracking.save();
|
||||
else {
|
||||
originalSaleTracking = await app.models.SaleTracking.findOne({
|
||||
where: {
|
||||
saleFk: ctx.args.sale,
|
||||
stateFk: ctx.args.stateFk
|
||||
}
|
||||
});
|
||||
await originalSaleTracking.destroy();
|
||||
}
|
||||
await originalSale.save();
|
||||
const itemShelvingsToDestroy = await app.models.ItemShelving.find({where: {shelvingFk: ctx.args.shelvingFk, itemFk: ctx.args.itemFk}});
|
||||
for (const itemShelving of itemShelvingsToDestroy)
|
||||
await itemShelving.destroy();
|
||||
|
||||
await originalItemShelving.save();
|
||||
await originalTicketLastState.save();
|
||||
await itemPlacement.save();
|
||||
});
|
||||
});
|
|
@ -0,0 +1,90 @@
|
|||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('updateCollectionSale', {
|
||||
description: 'Update sale of a collection',
|
||||
accessType: 'WRITE',
|
||||
accepts: [{
|
||||
arg: 'sale',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The sale id'
|
||||
}, {
|
||||
arg: 'originalQuantity',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The original sale quantity'
|
||||
},
|
||||
{
|
||||
arg: 'quantity',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The quantity to pick'
|
||||
},
|
||||
{
|
||||
arg: 'quantityPicked',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The quantity picked'
|
||||
}, {
|
||||
arg: 'ticketFk',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The ticket id'
|
||||
}, {
|
||||
arg: 'stateFk',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The state id'
|
||||
}, {
|
||||
arg: 'isNicho',
|
||||
type: 'Boolean',
|
||||
required: true,
|
||||
description: 'Determines whether the sale is picked from a nicho'
|
||||
}, {
|
||||
arg: 'shelvingFk',
|
||||
type: 'String',
|
||||
required: false,
|
||||
description: 'The shelving id'
|
||||
}, {
|
||||
arg: 'itemFk',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The item id'
|
||||
}, {
|
||||
arg: 'sectorFk',
|
||||
type: 'Number',
|
||||
required: true,
|
||||
description: 'The sector id'
|
||||
}],
|
||||
returns: {
|
||||
type: 'Object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/updateCollectionSale`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.updateCollectionSale = async ctx => {
|
||||
const userId = ctx.req.accessToken.userId;
|
||||
const args = ctx.args;
|
||||
|
||||
if (args.originalQuantity == args.quantity) {
|
||||
query = `CALL vn.collection_updateSale(?,?,?,?,?)`;
|
||||
await Self.rawSql(query, [args.sale, args.originalQuantity, userId, args.stateFk, args.ticketFk]);
|
||||
}
|
||||
|
||||
if (!args.isNicho) {
|
||||
query = `CALL vn.collection_faults(?,?,?)`;
|
||||
await Self.rawSql(query, [args.shelvingFk, args.quantityPicked, args.itemFk]);
|
||||
} else {
|
||||
query = `CALL vn.sector_getWarehouse(?)`;
|
||||
const [result] = await Self.rawSql(query, [args.sectorFk]);
|
||||
|
||||
query = `CALL vn.itemPlacementSave(?,?,?)`;
|
||||
await Self.rawSql(query, [args.shelvingFk, args.quantityPicked, result[0]['warehouseFk']]);
|
||||
}
|
||||
query = `CALL vn.sale_updateOriginalQuantity(?,?)`;
|
||||
return await Self.rawSql(query, [args.sale, args.quantity]);
|
||||
};
|
||||
};
|
|
@ -1,69 +0,0 @@
|
|||
const UserError = require('vn-loopback/util/user-error');
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const isProduction = require('vn-loopback/server/boot/isProduction');
|
||||
|
||||
module.exports = Self => {
|
||||
Self.remoteMethod('deleteTrashFiles', {
|
||||
description: 'Deletes files that have trash type',
|
||||
accessType: 'WRITE',
|
||||
returns: {
|
||||
type: 'object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/deleteTrashFiles`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.deleteTrashFiles = async options => {
|
||||
const myOptions = {};
|
||||
|
||||
if (typeof options == 'object')
|
||||
Object.assign(myOptions, options);
|
||||
|
||||
if (!isProduction())
|
||||
throw new UserError(`Action not allowed on the test environment`);
|
||||
|
||||
const models = Self.app.models;
|
||||
const DmsContainer = models.DmsContainer;
|
||||
|
||||
const trashDmsType = await models.DmsType.findOne({
|
||||
where: {code: 'trash'}
|
||||
}, myOptions);
|
||||
|
||||
const date = Date.vnNew();
|
||||
date.setMonth(date.getMonth() - 4);
|
||||
|
||||
const dmsToDelete = await models.Dms.find({
|
||||
where: {
|
||||
and: [
|
||||
{dmsTypeFk: trashDmsType.id},
|
||||
{created: {lt: date}}
|
||||
]
|
||||
}
|
||||
}, myOptions);
|
||||
|
||||
for (let dms of dmsToDelete) {
|
||||
const pathHash = DmsContainer.getHash(dms.id);
|
||||
const dmsContainer = await DmsContainer.container(pathHash);
|
||||
try {
|
||||
const dstFile = path.join(dmsContainer.client.root, pathHash, dms.file);
|
||||
await fs.unlink(dstFile);
|
||||
} catch (err) {
|
||||
if (err.code != 'ENOENT' && dms.file)
|
||||
throw err;
|
||||
}
|
||||
|
||||
await dms.destroy(myOptions);
|
||||
|
||||
const dstFolder = path.join(dmsContainer.client.root, pathHash);
|
||||
try {
|
||||
await fs.rmdir(dstFolder);
|
||||
} catch (err) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
|
@ -29,8 +29,7 @@ module.exports = Self => {
|
|||
http: {
|
||||
path: `/:id/downloadFile`,
|
||||
verb: 'GET'
|
||||
},
|
||||
accessScopes: ['DEFAULT', 'read:multimedia']
|
||||
}
|
||||
});
|
||||
|
||||
Self.downloadFile = async function(ctx, id) {
|
||||
|
|
|
@ -22,8 +22,8 @@ module.exports = Self => {
|
|||
|
||||
Self.removeFile = async(ctx, id, options) => {
|
||||
const models = Self.app.models;
|
||||
const myOptions = {};
|
||||
let tx;
|
||||
const myOptions = {};
|
||||
|
||||
if (typeof options == 'object')
|
||||
Object.assign(myOptions, options);
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
const {models} = require('vn-loopback/server/server');
|
||||
const app = require('vn-loopback/server/server');
|
||||
|
||||
describe('dms downloadFile()', () => {
|
||||
let dmsId = 1;
|
||||
|
@ -6,7 +6,7 @@ describe('dms downloadFile()', () => {
|
|||
it('should return a response for an employee with text content-type', async() => {
|
||||
let workerId = 1107;
|
||||
let ctx = {req: {accessToken: {userId: workerId}}};
|
||||
const result = await models.Dms.downloadFile(ctx, dmsId);
|
||||
const result = await app.models.Dms.downloadFile(ctx, dmsId);
|
||||
|
||||
expect(result[1]).toEqual('text/plain');
|
||||
});
|
||||
|
@ -16,7 +16,7 @@ describe('dms downloadFile()', () => {
|
|||
let ctx = {req: {accessToken: {userId: clientId}}};
|
||||
|
||||
let error;
|
||||
await models.Dms.downloadFile(ctx, dmsId).catch(e => {
|
||||
await app.models.Dms.downloadFile(ctx, dmsId).catch(e => {
|
||||
error = e;
|
||||
}).finally(() => {
|
||||
expect(error.message).toEqual(`You don't have enough privileges`);
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
const {models} = require('vn-loopback/server/server');
|
||||
const app = require('vn-loopback/server/server');
|
||||
|
||||
describe('dms removeFile()', () => {
|
||||
let dmsId = 1;
|
||||
|
@ -8,7 +8,7 @@ describe('dms removeFile()', () => {
|
|||
let ctx = {req: {accessToken: {userId: clientId}}};
|
||||
|
||||
let error;
|
||||
await models.Dms.removeFile(ctx, dmsId).catch(e => {
|
||||
await app.models.Dms.removeFile(ctx, dmsId).catch(e => {
|
||||
error = e;
|
||||
}).finally(() => {
|
||||
expect(error.message).toEqual(`You don't have enough privileges`);
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
const {models} = require('vn-loopback/server/server');
|
||||
const app = require('vn-loopback/server/server');
|
||||
|
||||
describe('dms updateFile()', () => {
|
||||
it(`should return an error for a user without enough privileges`, async() => {
|
||||
|
@ -11,7 +11,7 @@ describe('dms updateFile()', () => {
|
|||
let ctx = {req: {accessToken: {userId: clientId}}, args: {dmsTypeId: dmsTypeId}};
|
||||
|
||||
let error;
|
||||
await models.Dms.updateFile(ctx, dmsId, warehouseId, companyId, dmsTypeId).catch(e => {
|
||||
await app.models.Dms.updateFile(ctx, dmsId, warehouseId, companyId, dmsTypeId).catch(e => {
|
||||
error = e;
|
||||
}).finally(() => {
|
||||
expect(error.message).toEqual(`You don't have enough privileges`);
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
const {models} = require('vn-loopback/server/server');
|
||||
const app = require('vn-loopback/server/server');
|
||||
|
||||
describe('dms uploadFile()', () => {
|
||||
it(`should return an error for a user without enough privileges`, async() => {
|
||||
|
@ -7,7 +7,7 @@ describe('dms uploadFile()', () => {
|
|||
let ctx = {req: {accessToken: {userId: clientId}}, args: {dmsTypeId: ticketDmsTypeId}};
|
||||
|
||||
let error;
|
||||
await models.Dms.uploadFile(ctx).catch(e => {
|
||||
await app.models.Dms.uploadFile(ctx).catch(e => {
|
||||
error = e;
|
||||
}).finally(() => {
|
||||
expect(error.message).toEqual(`You don't have enough privileges`);
|
||||
|
|
|
@ -38,7 +38,7 @@ module.exports = Self => {
|
|||
{
|
||||
arg: 'hasFile',
|
||||
type: 'Boolean',
|
||||
description: 'True if has the original in paper'
|
||||
description: 'True if has an attached file'
|
||||
},
|
||||
{
|
||||
arg: 'hasFileAttached',
|
||||
|
|
|
@ -9,35 +9,35 @@ module.exports = Self => {
|
|||
accepts: [
|
||||
{
|
||||
arg: 'warehouseId',
|
||||
type: 'number',
|
||||
type: 'Number',
|
||||
description: 'The warehouse id',
|
||||
required: true
|
||||
}, {
|
||||
arg: 'companyId',
|
||||
type: 'number',
|
||||
type: 'Number',
|
||||
description: 'The company id',
|
||||
required: true
|
||||
}, {
|
||||
arg: 'dmsTypeId',
|
||||
type: 'number',
|
||||
type: 'Number',
|
||||
description: 'The dms type id',
|
||||
required: true
|
||||
}, {
|
||||
arg: 'reference',
|
||||
type: 'string',
|
||||
type: 'String',
|
||||
required: true
|
||||
}, {
|
||||
arg: 'description',
|
||||
type: 'string',
|
||||
type: 'String',
|
||||
required: true
|
||||
}, {
|
||||
arg: 'hasFile',
|
||||
type: 'boolean',
|
||||
type: 'Boolean',
|
||||
description: 'True if has an attached file',
|
||||
required: true
|
||||
}],
|
||||
returns: {
|
||||
type: 'object',
|
||||
type: 'Object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
|
@ -49,6 +49,7 @@ module.exports = Self => {
|
|||
Self.uploadFile = async(ctx, options) => {
|
||||
const models = Self.app.models;
|
||||
const TempContainer = models.TempContainer;
|
||||
const DmsContainer = models.DmsContainer;
|
||||
const fileOptions = {};
|
||||
const args = ctx.args;
|
||||
|
||||
|
@ -78,21 +79,19 @@ module.exports = Self => {
|
|||
|
||||
const addedDms = [];
|
||||
for (const uploadedFile of files) {
|
||||
const newDms = await createDms(ctx, uploadedFile, myOptions);
|
||||
const pathHash = DmsContainer.getHash(newDms.id);
|
||||
|
||||
const file = await TempContainer.getFile(tempContainer.name, uploadedFile.name);
|
||||
srcFile = path.join(file.client.root, file.container, file.name);
|
||||
|
||||
const data = {
|
||||
workerFk: ctx.req.accessToken.userId,
|
||||
dmsTypeFk: args.dmsTypeId,
|
||||
companyFk: args.companyId,
|
||||
warehouseFk: args.warehouseId,
|
||||
reference: args.reference,
|
||||
description: args.description,
|
||||
contentType: uploadedFile.type,
|
||||
hasFile: args.hasFile
|
||||
};
|
||||
const extension = await models.DmsContainer.getFileExtension(uploadedFile.name);
|
||||
const newDms = await Self.createFromFile(data, extension, srcFile, myOptions);
|
||||
const dmsContainer = await DmsContainer.container(pathHash);
|
||||
const dstFile = path.join(dmsContainer.client.root, pathHash, newDms.file);
|
||||
|
||||
await fs.move(srcFile, dstFile, {
|
||||
overwrite: true
|
||||
});
|
||||
|
||||
addedDms.push(newDms);
|
||||
}
|
||||
|
||||
|
@ -108,4 +107,27 @@ module.exports = Self => {
|
|||
throw e;
|
||||
}
|
||||
};
|
||||
|
||||
async function createDms(ctx, file, myOptions) {
|
||||
const models = Self.app.models;
|
||||
const myUserId = ctx.req.accessToken.userId;
|
||||
const args = ctx.args;
|
||||
|
||||
const newDms = await Self.create({
|
||||
workerFk: myUserId,
|
||||
dmsTypeFk: args.dmsTypeId,
|
||||
companyFk: args.companyId,
|
||||
warehouseFk: args.warehouseId,
|
||||
reference: args.reference,
|
||||
description: args.description,
|
||||
contentType: file.type,
|
||||
hasFile: args.hasFile
|
||||
}, myOptions);
|
||||
|
||||
let fileName = file.name;
|
||||
const extension = models.DmsContainer.getFileExtension(fileName);
|
||||
fileName = `${newDms.id}.${extension}`;
|
||||
|
||||
return newDms.updateAttribute('file', fileName, myOptions);
|
||||
}
|
||||
};
|
||||
|
|
|
@ -1,82 +0,0 @@
|
|||
module.exports = Self => {
|
||||
Self.remoteMethod('checkFile', {
|
||||
description: 'Check whether the Docuware file exists',
|
||||
accessType: 'READ',
|
||||
accepts: [
|
||||
{
|
||||
arg: 'id',
|
||||
type: 'number',
|
||||
description: 'The id',
|
||||
http: {source: 'path'}
|
||||
},
|
||||
{
|
||||
arg: 'fileCabinet',
|
||||
type: 'string',
|
||||
required: true,
|
||||
description: 'The fileCabinet name'
|
||||
},
|
||||
{
|
||||
arg: 'filter',
|
||||
type: 'object',
|
||||
description: 'The filter'
|
||||
},
|
||||
{
|
||||
arg: 'signed',
|
||||
type: 'boolean',
|
||||
description: 'Whether the PDF needs to be signed'
|
||||
},
|
||||
],
|
||||
returns: {
|
||||
type: 'object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/:id/checkFile`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.checkFile = async function(id, fileCabinet, filter, signed) {
|
||||
const models = Self.app.models;
|
||||
const action = 'find';
|
||||
|
||||
const docuwareInfo = await models.Docuware.findOne({
|
||||
where: {
|
||||
code: fileCabinet,
|
||||
action: action
|
||||
}
|
||||
});
|
||||
|
||||
if (!filter) {
|
||||
filter = {
|
||||
condition: [
|
||||
{
|
||||
DBName: docuwareInfo.findById,
|
||||
Value: [id]
|
||||
}
|
||||
],
|
||||
sortOrder: [
|
||||
{
|
||||
Field: 'FILENAME',
|
||||
Direction: 'Desc'
|
||||
}
|
||||
]
|
||||
};
|
||||
}
|
||||
if (signed) {
|
||||
filter.condition.push({
|
||||
DBName: 'ESTADO',
|
||||
Value: ['Firmado']
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
const [response] = await Self.get(fileCabinet, filter);
|
||||
if (!response) return false;
|
||||
|
||||
return {id: response['Document ID']};
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
};
|
|
@ -1,184 +0,0 @@
|
|||
const axios = require('axios');
|
||||
|
||||
module.exports = Self => {
|
||||
/**
|
||||
* Returns basic headers
|
||||
*
|
||||
* @return {object} - The headers
|
||||
*/
|
||||
Self.getOptions = async() => {
|
||||
const docuwareConfig = await Self.app.models.DocuwareConfig.findOne();
|
||||
const now = Date.vnNow();
|
||||
let {url, username, password, token, expired} = docuwareConfig;
|
||||
|
||||
if (process.env.NODE_ENV && (!expired || expired < now + 60)) {
|
||||
const {data: {IdentityServiceUrl}} = await axios.get(`${url}/Home/IdentityServiceInfo`);
|
||||
const {data: {token_endpoint}} = await axios.get(`${IdentityServiceUrl}/.well-known/openid-configuration`);
|
||||
const {data} = await axios.post(token_endpoint, {
|
||||
grant_type: 'password',
|
||||
scope: 'docuware.platform',
|
||||
client_id: 'docuware.platform.net.client',
|
||||
username,
|
||||
password
|
||||
}, {headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/x-www-form-urlencoded'
|
||||
}});
|
||||
|
||||
const newToken = data.access_token;
|
||||
token = data.token_type + ' ' + newToken;
|
||||
await docuwareConfig.updateAttributes({
|
||||
token,
|
||||
expired: now + data.expires_in
|
||||
});
|
||||
}
|
||||
|
||||
const headers = {
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': token
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
url,
|
||||
headers
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the dialog id
|
||||
*
|
||||
* @param {string} code - The fileCabinet name
|
||||
* @param {string} action - The action name
|
||||
* @param {string} fileCabinetId - Optional. The fileCabinet id
|
||||
* @return {number} - The fileCabinet id
|
||||
*/
|
||||
Self.getDialog = async(code, action, fileCabinetId) => {
|
||||
if (!process.env.NODE_ENV)
|
||||
return Math.floor(Math.random() + 100);
|
||||
|
||||
const docuwareInfo = await Self.app.models.Docuware.findOne({
|
||||
where: {
|
||||
code,
|
||||
action
|
||||
}
|
||||
});
|
||||
if (!fileCabinetId) fileCabinetId = await Self.getFileCabinet(code);
|
||||
|
||||
const options = await Self.getOptions();
|
||||
|
||||
const response = await axios.get(`${options.url}/FileCabinets/${fileCabinetId}/dialogs`, options.headers);
|
||||
const dialogs = response.data.Dialog;
|
||||
const dialogId = dialogs.find(dialogs => dialogs.DisplayName === docuwareInfo.dialogName).Id;
|
||||
|
||||
return dialogId;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns the fileCabinetId
|
||||
*
|
||||
* @param {string} code - The fileCabinet code
|
||||
* @return {number} - The fileCabinet id
|
||||
*/
|
||||
Self.getFileCabinet = async code => {
|
||||
if (!process.env.NODE_ENV)
|
||||
return Math.floor(Math.random() + 100);
|
||||
|
||||
const options = await Self.getOptions();
|
||||
const docuwareInfo = await Self.app.models.Docuware.findOne({
|
||||
where: {
|
||||
code
|
||||
}
|
||||
});
|
||||
|
||||
const fileCabinetResponse = await axios.get(`${options.url}/FileCabinets`, options.headers);
|
||||
const fileCabinets = fileCabinetResponse.data.FileCabinet;
|
||||
const fileCabinetId = fileCabinets.find(fileCabinet => fileCabinet.Name === docuwareInfo.fileCabinetName).Id;
|
||||
|
||||
return fileCabinetId;
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns docuware data
|
||||
*
|
||||
* @param {string} code - The fileCabinet code
|
||||
* @param {object} filter - The filter for docuware
|
||||
* @param {object} parse - The fields parsed
|
||||
* @return {object} - The data
|
||||
*/
|
||||
Self.get = async(code, filter, parse) => {
|
||||
if (!process.env.NODE_ENV) return;
|
||||
|
||||
const options = await Self.getOptions();
|
||||
const fileCabinetId = await Self.getFileCabinet(code);
|
||||
const dialogId = await Self.getDialog(code, 'find', fileCabinetId);
|
||||
|
||||
const data = await axios.post(
|
||||
`${options.url}/FileCabinets/${fileCabinetId}/Query/DialogExpression?dialogId=${dialogId}`,
|
||||
filter,
|
||||
options.headers
|
||||
);
|
||||
return parser(data.data, parse);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns docuware data
|
||||
*
|
||||
* @param {string} code - The fileCabinet code
|
||||
* @param {any} id - The id of docuware
|
||||
* @param {object} parse - The fields parsed
|
||||
* @return {object} - The data
|
||||
*/
|
||||
Self.getById = async(code, id, parse) => {
|
||||
if (!process.env.NODE_ENV) return;
|
||||
|
||||
const docuwareInfo = await Self.app.models.Docuware.findOne({
|
||||
fields: ['findById'],
|
||||
where: {
|
||||
code,
|
||||
action: 'find'
|
||||
}
|
||||
});
|
||||
const filter = {
|
||||
condition: [
|
||||
{
|
||||
DBName: docuwareInfo.findById,
|
||||
Value: [id]
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
return Self.get(code, filter, parse);
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns docuware data filtered
|
||||
*
|
||||
* @param {array} data - The data
|
||||
* @param {object} parse - The fields parsed
|
||||
* @return {object} - The data parsed
|
||||
*/
|
||||
function parser(data, parse) {
|
||||
if (!(data && data.Items)) return data;
|
||||
|
||||
const parsed = [];
|
||||
for (item of data.Items) {
|
||||
const itemParsed = {};
|
||||
item.Fields.map(field => {
|
||||
if (field.ItemElementName.includes('Date')) field.Item = toDate(field.Item);
|
||||
if (!parse) return itemParsed[field.FieldLabel] = field.Item;
|
||||
if (parse[field.FieldLabel])
|
||||
itemParsed[parse[field.FieldLabel]] = field.Item;
|
||||
});
|
||||
parsed.push(itemParsed);
|
||||
}
|
||||
return parsed;
|
||||
}
|
||||
|
||||
function toDate(value) {
|
||||
if (!value) return;
|
||||
return new Date(Number(value.substring(6, 19)));
|
||||
}
|
||||
};
|
|
@ -1,78 +0,0 @@
|
|||
const {Email} = require('vn-print');
|
||||
|
||||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('deliveryNoteEmail', {
|
||||
description: 'Sends the delivery note email with a Docuware PDF attached',
|
||||
accessType: 'WRITE',
|
||||
accessScopes: ['docuwareDeliveryNoteEmail'],
|
||||
accepts: [
|
||||
{
|
||||
arg: 'id',
|
||||
type: 'number',
|
||||
required: true,
|
||||
description: 'The ticket id',
|
||||
},
|
||||
{
|
||||
arg: 'recipientId',
|
||||
type: 'number',
|
||||
description: 'The client id',
|
||||
required: true
|
||||
},
|
||||
{
|
||||
arg: 'recipient',
|
||||
type: 'string',
|
||||
description: 'The recipient email',
|
||||
required: false,
|
||||
}
|
||||
],
|
||||
returns: [
|
||||
{
|
||||
arg: 'body',
|
||||
type: 'file',
|
||||
root: true
|
||||
}, {
|
||||
arg: 'Content-Type',
|
||||
type: 'String',
|
||||
http: {target: 'header'}
|
||||
}, {
|
||||
arg: 'Content-Disposition',
|
||||
type: 'String',
|
||||
http: {target: 'header'}
|
||||
}
|
||||
],
|
||||
http: {
|
||||
path: '/delivery-note-email',
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.deliveryNoteEmail = async(ctx, id, recipientId, recipient) => {
|
||||
const models = Self.app.models;
|
||||
const args = Object.assign({}, ctx.args);
|
||||
const params = {
|
||||
recipient: args.recipient,
|
||||
lang: ctx.req.getLocale()
|
||||
};
|
||||
|
||||
delete args.ctx;
|
||||
for (const param in args)
|
||||
params[param] = args[param];
|
||||
|
||||
if (!recipient) {
|
||||
client = await models.Client.findById(recipientId, {fields: ['email']});
|
||||
params.recipient = client.email;
|
||||
}
|
||||
|
||||
const email = new Email('delivery-note', params);
|
||||
|
||||
const docuwareFile = await models.Docuware.download(id, 'deliveryNote');
|
||||
|
||||
return email.send({
|
||||
overrideAttachments: true,
|
||||
attachments: [{
|
||||
filename: `${id}.pdf`,
|
||||
content: docuwareFile[0]
|
||||
}]
|
||||
});
|
||||
};
|
||||
};
|
|
@ -1,67 +0,0 @@
|
|||
/* eslint max-len: ["error", { "code": 180 }]*/
|
||||
const axios = require('axios');
|
||||
const UserError = require('vn-loopback/util/user-error');
|
||||
|
||||
module.exports = Self => {
|
||||
Self.remoteMethod('download', {
|
||||
description: 'Download a Docuware PDF',
|
||||
accessType: 'READ',
|
||||
accepts: [
|
||||
{
|
||||
arg: 'id',
|
||||
type: 'number',
|
||||
description: 'The ticket id',
|
||||
http: {source: 'path'}
|
||||
},
|
||||
{
|
||||
arg: 'fileCabinet',
|
||||
type: 'string',
|
||||
description: 'The file cabinet'
|
||||
},
|
||||
{
|
||||
arg: 'filter',
|
||||
type: 'object',
|
||||
description: 'The filter'
|
||||
}
|
||||
],
|
||||
returns: [
|
||||
{
|
||||
arg: 'body',
|
||||
type: 'file',
|
||||
root: true
|
||||
}, {
|
||||
arg: 'Content-Type',
|
||||
type: 'string',
|
||||
http: {target: 'header'}
|
||||
}, {
|
||||
arg: 'Content-Disposition',
|
||||
type: 'string',
|
||||
http: {target: 'header'}
|
||||
}
|
||||
],
|
||||
http: {
|
||||
path: `/:id/download`,
|
||||
verb: 'GET'
|
||||
},
|
||||
accessScopes: ['DEFAULT', 'read:multimedia']
|
||||
});
|
||||
|
||||
Self.download = async function(id, fileCabinet, filter) {
|
||||
const models = Self.app.models;
|
||||
|
||||
const docuwareFile = await models.Docuware.checkFile(id, fileCabinet, filter);
|
||||
if (!docuwareFile) throw new UserError('The DOCUWARE PDF document does not exists');
|
||||
|
||||
const fileCabinetId = await Self.getFileCabinet(fileCabinet);
|
||||
const options = await Self.getOptions();
|
||||
options.headers.responseType = 'stream';
|
||||
|
||||
const fileName = `filename="${id}.pdf"`;
|
||||
const contentType = 'application/pdf';
|
||||
const downloadUri = `${options.url}/FileCabinets/${fileCabinetId}/Documents/${docuwareFile.id}/FileDownload?targetFileType=Auto&keepAnnotations=false`;
|
||||
|
||||
const stream = await axios.get(downloadUri, options.headers);
|
||||
|
||||
return [stream.data, contentType, fileName];
|
||||
};
|
||||
};
|
|
@ -1,28 +0,0 @@
|
|||
const models = require('vn-loopback/server/server').models;
|
||||
|
||||
describe('docuware download()', () => {
|
||||
const ticketId = 1;
|
||||
|
||||
const docuwareModel = models.Docuware;
|
||||
const fileCabinetName = 'deliveryNote';
|
||||
|
||||
it('should return false if there are no documents', async() => {
|
||||
spyOn(docuwareModel, 'get').and.returnValue((new Promise(resolve => resolve({Items: []}))));
|
||||
|
||||
const result = await models.Docuware.checkFile(ticketId, fileCabinetName, null, true);
|
||||
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
|
||||
it('should return the document data', async() => {
|
||||
const docuwareId = 1;
|
||||
const response = [{
|
||||
'Document ID': docuwareId
|
||||
}];
|
||||
spyOn(docuwareModel, 'get').and.returnValue((new Promise(resolve => resolve(response))));
|
||||
|
||||
const result = await models.Docuware.checkFile(ticketId, fileCabinetName, null, true);
|
||||
|
||||
expect(result.id).toEqual(docuwareId);
|
||||
});
|
||||
});
|
|
@ -1,139 +0,0 @@
|
|||
const axios = require('axios');
|
||||
const models = require('vn-loopback/server/server').models;
|
||||
|
||||
describe('Docuware core', () => {
|
||||
const fileCabinetCode = 'deliveryNote';
|
||||
beforeAll(async() => {
|
||||
process.env.NODE_ENV = 'testing';
|
||||
|
||||
const docuwareInfo = await models.Docuware.findOne({
|
||||
where: {
|
||||
code: fileCabinetCode
|
||||
}
|
||||
});
|
||||
|
||||
spyOn(axios, 'get').and.callFake(url => {
|
||||
if (url.includes('IdentityServiceInfo')) return {data: {IdentityServiceUrl: 'IdentityServiceUrl'}};
|
||||
if (url.includes('IdentityServiceUrl')) return {data: {token_endpoint: 'token_endpoint'}};
|
||||
if (url.includes('dialogs')) {
|
||||
return {
|
||||
data: {
|
||||
Dialog: [
|
||||
{
|
||||
DisplayName: 'find',
|
||||
Id: 'getDialogTest'
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if (url.includes('FileCabinets')) {
|
||||
return {data: {
|
||||
FileCabinet: [
|
||||
{
|
||||
Name: docuwareInfo.fileCabinetName,
|
||||
Id: 'getFileCabinetTest'
|
||||
}
|
||||
]
|
||||
}};
|
||||
}
|
||||
});
|
||||
|
||||
spyOn(axios, 'post').and.callFake(url => {
|
||||
if (url.includes('token_endpoint')) {
|
||||
return {data: {
|
||||
access_token: 'access_token',
|
||||
token_type: 'bearer',
|
||||
expires_in: 10000
|
||||
}};
|
||||
}
|
||||
if (url.includes('DialogExpression')) {
|
||||
return {data: {
|
||||
Items: [{
|
||||
Fields: [
|
||||
{
|
||||
ItemElementName: 'integer',
|
||||
FieldLabel: 'firstRequiredField',
|
||||
Item: 1
|
||||
},
|
||||
{
|
||||
ItemElementName: 'string',
|
||||
FieldLabel: 'secondRequiredField',
|
||||
Item: 'myName'
|
||||
},
|
||||
{
|
||||
ItemElementName: 'integer',
|
||||
FieldLabel: 'notRequiredField',
|
||||
Item: 2
|
||||
}
|
||||
]
|
||||
}]
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
delete process.env.NODE_ENV;
|
||||
});
|
||||
|
||||
describe('getOptions()', () => {
|
||||
it('should return url and headers', async() => {
|
||||
const result = await models.Docuware.getOptions();
|
||||
|
||||
expect(result.url).toBeDefined();
|
||||
expect(result.headers).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Dialog()', () => {
|
||||
it('should return dialogId', async() => {
|
||||
const result = await models.Docuware.getDialog('deliveryNote', 'find', 'randomFileCabinetId');
|
||||
|
||||
expect(result).toEqual('getDialogTest');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFileCabinet()', () => {
|
||||
it('should return fileCabinetId', async() => {
|
||||
const result = await models.Docuware.getFileCabinet(fileCabinetCode);
|
||||
|
||||
expect(result).toEqual('getFileCabinetTest');
|
||||
});
|
||||
});
|
||||
|
||||
describe('get()', () => {
|
||||
it('should return data without parse', async() => {
|
||||
const [result] = await models.Docuware.get('deliveryNote');
|
||||
|
||||
expect(result.firstRequiredField).toEqual(1);
|
||||
});
|
||||
|
||||
it('should return data with parse', async() => {
|
||||
const parse = {
|
||||
'firstRequiredField': 'id',
|
||||
'secondRequiredField': 'name',
|
||||
};
|
||||
const [result] = await models.Docuware.get('deliveryNote', null, parse);
|
||||
|
||||
expect(result.id).toEqual(1);
|
||||
expect(result.name).toEqual('myName');
|
||||
expect(result.notRequiredField).not.toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('getById()', () => {
|
||||
it('should return data', async() => {
|
||||
spyOn(models.Docuware, 'get');
|
||||
await models.Docuware.getById('deliveryNote', 1);
|
||||
|
||||
expect(models.Docuware.get).toHaveBeenCalledWith(
|
||||
'deliveryNote',
|
||||
{condition: [Object({DBName: 'N__ALBAR_N', Value: [1]})]},
|
||||
undefined
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,47 +0,0 @@
|
|||
const models = require('vn-loopback/server/server').models;
|
||||
const axios = require('axios');
|
||||
const stream = require('stream');
|
||||
|
||||
describe('docuware download()', () => {
|
||||
const userId = 9;
|
||||
const ticketId = 1;
|
||||
const ctx = {
|
||||
req: {
|
||||
|
||||
accessToken: {userId: userId},
|
||||
headers: {origin: 'http://localhost:5000'},
|
||||
}
|
||||
};
|
||||
|
||||
const docuwareModel = models.Docuware;
|
||||
const fileCabinetName = 'deliveryNote';
|
||||
|
||||
beforeAll(() => {
|
||||
spyOn(docuwareModel, 'getFileCabinet').and.returnValue((new Promise(resolve => resolve(Math.random()))));
|
||||
spyOn(docuwareModel, 'getDialog').and.returnValue((new Promise(resolve => resolve(Math.random()))));
|
||||
});
|
||||
|
||||
it('should return an error if the file does not exist', async() => {
|
||||
spyOn(docuwareModel, 'checkFile').and.returnValue(false);
|
||||
spyOn(axios, 'get').and.returnValue(new stream.PassThrough({objectMode: true}));
|
||||
|
||||
let error;
|
||||
try {
|
||||
await models.Docuware.download(ctx, ticketId, fileCabinetName);
|
||||
} catch (e) {
|
||||
error = e.message;
|
||||
}
|
||||
|
||||
expect(error).toEqual('The DOCUWARE PDF document does not exists');
|
||||
});
|
||||
|
||||
it('should return the downloaded file if the file exists', async() => {
|
||||
spyOn(docuwareModel, 'checkFile').and.returnValue({});
|
||||
spyOn(axios, 'get').and.returnValue(new stream.PassThrough({objectMode: true}));
|
||||
|
||||
const result = await models.Docuware.download(ticketId, fileCabinetName);
|
||||
|
||||
expect(result[1]).toEqual('application/pdf');
|
||||
expect(result[2]).toEqual(`filename="${ticketId}.pdf"`);
|
||||
});
|
||||
});
|
|
@ -1,63 +0,0 @@
|
|||
const models = require('vn-loopback/server/server').models;
|
||||
|
||||
describe('docuware upload()', () => {
|
||||
const userId = 9;
|
||||
const ticketIds = [10];
|
||||
const ctx = {
|
||||
args: {ticketIds},
|
||||
req: {
|
||||
getLocale: () => {
|
||||
return 'en';
|
||||
},
|
||||
accessToken: {userId: userId},
|
||||
headers: {origin: 'http://localhost:5000'},
|
||||
}
|
||||
};
|
||||
|
||||
const docuwareModel = models.Docuware;
|
||||
const ticketModel = models.Ticket;
|
||||
const fileCabinetName = 'deliveryNote';
|
||||
|
||||
beforeAll(() => {
|
||||
spyOn(docuwareModel, 'getFileCabinet').and.returnValue(new Promise(resolve => resolve(Math.random())));
|
||||
spyOn(docuwareModel, 'getDialog').and.returnValue(new Promise(resolve => resolve(Math.random())));
|
||||
});
|
||||
|
||||
it('should try upload file', async() => {
|
||||
const tx = await models.Docuware.beginTransaction({});
|
||||
spyOn(ticketModel, 'deliveryNotePdf').and.returnValue(new Promise(resolve => resolve({})));
|
||||
|
||||
let error;
|
||||
try {
|
||||
const options = {transaction: tx};
|
||||
const user = await models.UserConfig.findById(userId, null, options);
|
||||
await user.updateAttribute('tabletFk', 'Tablet1', options);
|
||||
await models.Docuware.upload(ctx, ticketIds, fileCabinetName, options);
|
||||
|
||||
await tx.rollback();
|
||||
} catch (e) {
|
||||
error = e;
|
||||
await tx.rollback();
|
||||
}
|
||||
|
||||
expect(error.message).toEqual('Action not allowed on the test environment');
|
||||
});
|
||||
|
||||
it('should throw an error when no tablet is assigned', async() => {
|
||||
const tx = await models.Docuware.beginTransaction({});
|
||||
spyOn(ticketModel, 'deliveryNotePdf').and.returnValue(new Promise(resolve => resolve({})));
|
||||
|
||||
let error;
|
||||
try {
|
||||
const options = {transaction: tx};
|
||||
await models.Docuware.upload(ctx, ticketIds, fileCabinetName, options);
|
||||
|
||||
await tx.rollback();
|
||||
} catch (e) {
|
||||
error = e;
|
||||
await tx.rollback();
|
||||
}
|
||||
|
||||
expect(error.message).toEqual('This user does not have an assigned tablet');
|
||||
});
|
||||
});
|
|
@ -1,164 +0,0 @@
|
|||
const UserError = require('vn-loopback/util/user-error');
|
||||
const axios = require('axios');
|
||||
const isProduction = require('vn-loopback/server/boot/isProduction');
|
||||
|
||||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('upload', {
|
||||
description: 'Upload Docuware PDFs',
|
||||
accessType: 'WRITE',
|
||||
accepts: [
|
||||
{
|
||||
arg: 'ticketIds',
|
||||
type: ['number'],
|
||||
description: 'The ticket ids',
|
||||
required: true
|
||||
},
|
||||
{
|
||||
arg: 'fileCabinet',
|
||||
type: 'string',
|
||||
description: 'The file cabinet',
|
||||
required: true
|
||||
}
|
||||
],
|
||||
returns: {
|
||||
type: 'object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/upload`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.upload = async function(ctx, ticketIds, fileCabinet, options) {
|
||||
delete ctx.args.ticketIds;
|
||||
const models = Self.app.models;
|
||||
const action = 'store';
|
||||
|
||||
const myOptions = {};
|
||||
|
||||
if (typeof options == 'object')
|
||||
Object.assign(myOptions, options);
|
||||
|
||||
const userConfig = await models.UserConfig.findById(ctx.req.accessToken.userId, {
|
||||
fields: ['tabletFk']
|
||||
}, myOptions);
|
||||
|
||||
if (!userConfig?.tabletFk)
|
||||
throw new UserError('This user does not have an assigned tablet');
|
||||
|
||||
const docuwareOptions = await Self.getOptions();
|
||||
const fileCabinetId = await Self.getFileCabinet(fileCabinet);
|
||||
const dialogId = await Self.getDialog(fileCabinet, action, fileCabinetId);
|
||||
|
||||
const uploaded = [];
|
||||
for (id of ticketIds) {
|
||||
// get delivery note
|
||||
ctx.args.id = id;
|
||||
const deliveryNote = await models.Ticket.deliveryNotePdf(ctx, {
|
||||
id,
|
||||
type: 'deliveryNote'
|
||||
}, myOptions);
|
||||
// get ticket data
|
||||
const ticket = await models.Ticket.findById(id, {
|
||||
include: [{
|
||||
relation: 'client',
|
||||
scope: {
|
||||
fields: ['id', 'name', 'fi']
|
||||
}
|
||||
}]
|
||||
}, myOptions);
|
||||
|
||||
// upload file
|
||||
const templateJson = {
|
||||
'Fields': [
|
||||
{
|
||||
'FieldName': 'N__ALBAR_N',
|
||||
'ItemElementName': 'string',
|
||||
'Item': id,
|
||||
},
|
||||
{
|
||||
'FieldName': 'CIF_PROVEEDOR',
|
||||
'ItemElementName': 'string',
|
||||
'Item': ticket.client().fi,
|
||||
},
|
||||
{
|
||||
'FieldName': 'CODIGO_PROVEEDOR',
|
||||
'ItemElementName': 'string',
|
||||
'Item': ticket.client().id,
|
||||
},
|
||||
{
|
||||
'FieldName': 'NOMBRE_PROVEEDOR',
|
||||
'ItemElementName': 'string',
|
||||
'Item': ticket.client().name + ' - ' + id,
|
||||
},
|
||||
{
|
||||
'FieldName': 'FECHA_FACTURA',
|
||||
'ItemElementName': 'date',
|
||||
'Item': ticket.shipped,
|
||||
},
|
||||
{
|
||||
'FieldName': 'TOTAL_FACTURA',
|
||||
'ItemElementName': 'Decimal',
|
||||
'Item': ticket.totalWithVat,
|
||||
},
|
||||
{
|
||||
'FieldName': 'ESTADO',
|
||||
'ItemElementName': 'string',
|
||||
'Item': 'Pendiente procesar',
|
||||
},
|
||||
{
|
||||
'FieldName': 'FIRMA_',
|
||||
'ItemElementName': 'string',
|
||||
'Item': 'Si',
|
||||
},
|
||||
{
|
||||
'FieldName': 'FILTRO_TABLET',
|
||||
'ItemElementName': 'string',
|
||||
'Item': userConfig.tabletFk,
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
if (!isProduction(false))
|
||||
throw new UserError('Action not allowed on the test environment');
|
||||
|
||||
// delete old
|
||||
const docuwareFile = await models.Docuware.checkFile(id, fileCabinet, false);
|
||||
if (docuwareFile) {
|
||||
const deleteJson = {
|
||||
'Field': [{'FieldName': 'ESTADO', 'Item': 'Pendiente eliminar', 'ItemElementName': 'String'}]
|
||||
};
|
||||
const deleteUri = `${docuwareOptions.url}/FileCabinets/${fileCabinetId}/Documents/${docuwareFile.id}/Fields`;
|
||||
await axios.put(deleteUri, deleteJson, docuwareOptions.headers);
|
||||
}
|
||||
|
||||
const uploadUri = `${docuwareOptions.url}/FileCabinets/${fileCabinetId}/Documents?StoreDialogId=${dialogId}`;
|
||||
const FormData = require('form-data');
|
||||
const data = new FormData();
|
||||
|
||||
data.append('document', JSON.stringify(templateJson), 'schema.json');
|
||||
data.append('file[]', deliveryNote[0], 'file.pdf');
|
||||
const uploadOptions = {
|
||||
headers: {
|
||||
'Content-Type': 'multipart/form-data',
|
||||
'X-File-ModifiedDate': Date.vnNew(),
|
||||
'Authorization': docuwareOptions.headers.headers.Authorization,
|
||||
...data.getHeaders()
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
await axios.post(uploadUri, data, uploadOptions);
|
||||
} catch (err) {
|
||||
const $t = ctx.req.__;
|
||||
const message = $t('Failed to upload delivery note', {id});
|
||||
if (uploaded.length)
|
||||
await models.TicketTracking.setDelivered(ctx, uploaded, myOptions);
|
||||
throw new UserError(message);
|
||||
}
|
||||
uploaded.push(id);
|
||||
}
|
||||
return models.TicketTracking.setDelivered(ctx, ticketIds, myOptions);
|
||||
};
|
||||
};
|
|
@ -1,14 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`bucket`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8, @col9, @col10, @col11, @col12)
|
||||
SET
|
||||
bucket_id = @col2,
|
||||
bucket_type_id = @col4,
|
||||
description = @col5,
|
||||
x_size = @col6,
|
||||
y_size = @col7,
|
||||
z_size = @col8,
|
||||
entry_date = STR_TO_DATE(@col10, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col11, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col12, '%Y%m%d%H%i')
|
|
@ -1,10 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`bucket_type`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6)
|
||||
SET
|
||||
bucket_type_id = @col2,
|
||||
description = @col3,
|
||||
entry_date = STR_TO_DATE(@col4, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col5, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col6, '%Y%m%d%H%i')
|
|
@ -1,11 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`feature`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7)
|
||||
SET
|
||||
item_id = @col2,
|
||||
feature_type_id = @col3,
|
||||
feature_value = @col4,
|
||||
entry_date = STR_TO_DATE(@col5, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col6, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col7, '%Y%m%d%H%i')
|
|
@ -1,10 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`genus`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6)
|
||||
SET
|
||||
genus_id = @col2,
|
||||
latin_genus_name = @col3,
|
||||
entry_date = STR_TO_DATE(@col4, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col5, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col6, '%Y%m%d%H%i')
|
|
@ -1,14 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`item`
|
||||
CHARACTER SET ascii
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8, @col9, @col10, @col11, @col12)
|
||||
SET id = @col2,
|
||||
product_name = @col4,
|
||||
name = @col5,
|
||||
plant_id = @col7,
|
||||
group_id = @col9,
|
||||
entry_date = STR_TO_DATE(@col10, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col11, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col12, '%Y%m%d%H%i')
|
||||
|
|
@ -1,12 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`item_feature`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8)
|
||||
SET
|
||||
item_id = @col2,
|
||||
feature = @col3,
|
||||
regulation_type = @col4,
|
||||
presentation_order = @col5,
|
||||
entry_date = STR_TO_DATE(@col6, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col7, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col8, '%Y%m%d%H%i')
|
|
@ -1,10 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`item_group`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6)
|
||||
SET
|
||||
group_code = @col2,
|
||||
dutch_group_description = @col3,
|
||||
entry_date = STR_TO_DATE(@col4, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col5, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col6, '%Y%m%d%H%i')
|
|
@ -1,11 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`plant`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8, @col9)
|
||||
SET
|
||||
plant_id = @col3,
|
||||
genus_id = @col4,
|
||||
specie_id = @col5,
|
||||
entry_date = STR_TO_DATE(@col7, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col8, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col9, '%Y%m%d%H%i')
|
|
@ -1,11 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`specie`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7)
|
||||
SET
|
||||
specie_id = @col2,
|
||||
genus_id = @col3,
|
||||
latin_species_name = @col4,
|
||||
entry_date = STR_TO_DATE(@col5, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col6, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col7, '%Y%m%d%H%i')
|
|
@ -1,11 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`supplier`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7, @col8, @col9, @col10, @col11, @col12, @col13, @col14, @col15, @col16, @col17, @col18, @col19, @col20)
|
||||
SET
|
||||
GLNAddressCode = @col2,
|
||||
supplier_id = @col4,
|
||||
company_name = @col3,
|
||||
entry_date = STR_TO_DATE(@col9, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col10, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col11, '%Y%m%d%H%i')
|
|
@ -1,11 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`type`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7)
|
||||
SET
|
||||
type_id = @col2,
|
||||
type_group_id = @col3,
|
||||
description = @col4,
|
||||
entry_date = STR_TO_DATE(@col5, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col6, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col7, '%Y%m%d%H%i')
|
|
@ -1,11 +0,0 @@
|
|||
LOAD DATA LOCAL INFILE ?
|
||||
INTO TABLE `edi`.`value`
|
||||
FIELDS TERMINATED BY ';'
|
||||
LINES TERMINATED BY '\n' (@col1, @col2, @col3, @col4, @col5, @col6, @col7)
|
||||
SET
|
||||
type_id = @col2,
|
||||
type_value = @col3,
|
||||
type_description = @col4,
|
||||
entry_date = STR_TO_DATE(@col5, '%Y%m%d'),
|
||||
expiry_date = IFNULL(NULL,STR_TO_DATE(@col6, '%Y%m%d')),
|
||||
change_date_time = STR_TO_DATE(@col7, '%Y%m%d%H%i')
|
|
@ -1,239 +0,0 @@
|
|||
/* eslint no-console: "off" */
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
|
||||
module.exports = Self => {
|
||||
Self.remoteMethodCtx('updateData', {
|
||||
description: 'Updates schema data from external provider',
|
||||
accessType: 'WRITE',
|
||||
returns: {
|
||||
type: 'object',
|
||||
root: true
|
||||
},
|
||||
http: {
|
||||
path: `/updateData`,
|
||||
verb: 'POST'
|
||||
}
|
||||
});
|
||||
|
||||
Self.updateData = async ctx => {
|
||||
const models = Self.app.models;
|
||||
|
||||
// Get files checksum
|
||||
const tx = await Self.beginTransaction({});
|
||||
|
||||
try {
|
||||
const options = {transaction: tx, userId: ctx.req.accessToken.userId};
|
||||
const files = await Self.rawSql('SELECT name, checksum, keyValue FROM edi.fileMultiConfig', null, options);
|
||||
|
||||
const updatableFiles = [];
|
||||
for (const file of files) {
|
||||
const fileChecksum = await getChecksum(file);
|
||||
|
||||
if (file.checksum != fileChecksum) {
|
||||
updatableFiles.push({
|
||||
name: file.name,
|
||||
checksum: fileChecksum
|
||||
});
|
||||
} else
|
||||
console.debug(`File already updated, skipping...`);
|
||||
}
|
||||
|
||||
if (updatableFiles.length === 0)
|
||||
return false;
|
||||
|
||||
// Download files
|
||||
const container = await models.TempContainer.container('edi');
|
||||
const tempPath = path.join(container.client.root, container.name);
|
||||
|
||||
let remoteFile;
|
||||
let tempDir;
|
||||
let tempFile;
|
||||
|
||||
const fileNames = updatableFiles.map(file => file.name);
|
||||
|
||||
const tables = await Self.rawSql(`
|
||||
SELECT fileName, toTable, file
|
||||
FROM edi.tableMultiConfig
|
||||
WHERE file IN (?)`, [fileNames], options);
|
||||
|
||||
for (const table of tables) {
|
||||
const fileName = table.file;
|
||||
|
||||
remoteFile = `codes/${fileName}.ZIP`;
|
||||
tempDir = `${tempPath}/${fileName}`;
|
||||
tempFile = `${tempPath}/${fileName}.zip`;
|
||||
|
||||
try {
|
||||
await fs.readFile(tempFile);
|
||||
} catch (error) {
|
||||
if (error.code === 'ENOENT') {
|
||||
console.debug(`Downloading file ${fileName}...`);
|
||||
const downloadOutput = await downloadFile(remoteFile, tempFile);
|
||||
if (downloadOutput.error)
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
await extractFile(fileName, tempFile, tempDir);
|
||||
|
||||
console.debug(`Updating table ${table.toTable}...`);
|
||||
await dumpData(tempDir, table, options);
|
||||
}
|
||||
|
||||
// Update files checksum
|
||||
for (const file of updatableFiles) {
|
||||
console.log(`Updating file ${file.name} checksum...`);
|
||||
await Self.rawSql(`
|
||||
UPDATE edi.fileMultiConfig
|
||||
SET checksum = ?
|
||||
WHERE name = ?`,
|
||||
[file.checksum, file.name], options);
|
||||
}
|
||||
|
||||
await tx.commit();
|
||||
|
||||
// Clean files
|
||||
try {
|
||||
console.debug(`Cleaning files...`);
|
||||
await fs.remove(tempPath);
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT')
|
||||
throw error;
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
await tx.rollback();
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
let ftpClient;
|
||||
async function getFtpClient() {
|
||||
if (!ftpClient) {
|
||||
const [ftpConfig] = await Self.rawSql('SELECT host, user, password FROM edi.ftpConfig');
|
||||
console.debug(`Openning FTP connection to ${ftpConfig.host}...\n`);
|
||||
|
||||
const FtpClient = require('ftps');
|
||||
|
||||
ftpClient = new FtpClient({
|
||||
host: ftpConfig.host,
|
||||
username: ftpConfig.user,
|
||||
password: ftpConfig.password,
|
||||
procotol: 'ftp',
|
||||
additionalLftpCommands: 'set ssl:verify-certificate no'
|
||||
});
|
||||
}
|
||||
|
||||
return ftpClient;
|
||||
}
|
||||
|
||||
async function getChecksum(file) {
|
||||
const ftpClient = await getFtpClient();
|
||||
console.debug(`Checking checksum for file ${file.name}...`);
|
||||
|
||||
ftpClient.cat(`codes/${file.name}.TXT`);
|
||||
|
||||
const response = await new Promise((resolve, reject) => {
|
||||
ftpClient.exec((err, response) => {
|
||||
if (err || response.error) {
|
||||
console.debug(`Error downloading checksum file... ${response.error}`);
|
||||
return reject(response.error || err);
|
||||
}
|
||||
|
||||
resolve(response);
|
||||
});
|
||||
});
|
||||
|
||||
if (response && response.data) {
|
||||
const fileContents = response.data;
|
||||
const rows = fileContents.split('\n');
|
||||
const row = rows[4];
|
||||
const columns = row.split(/\s+/);
|
||||
|
||||
let fileChecksum;
|
||||
if (file.keyValue)
|
||||
fileChecksum = columns[1];
|
||||
|
||||
if (!file.keyValue)
|
||||
fileChecksum = columns[0];
|
||||
|
||||
return fileChecksum;
|
||||
}
|
||||
}
|
||||
|
||||
async function downloadFile(remoteFile, tempFile) {
|
||||
const ftpClient = await getFtpClient();
|
||||
|
||||
ftpClient.get(remoteFile, tempFile);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
ftpClient.exec((err, response) => {
|
||||
if (err || response.error) {
|
||||
console.debug(`Error downloading file... ${response.error}`);
|
||||
return reject(err);
|
||||
}
|
||||
|
||||
resolve(response);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function extractFile(fileName, tempFile, tempDir) {
|
||||
const JSZip = require('jszip');
|
||||
|
||||
try {
|
||||
await fs.mkdir(tempDir);
|
||||
console.debug(`Extracting file ${fileName}...`);
|
||||
} catch (error) {
|
||||
if (error.code !== 'EEXIST')
|
||||
throw error;
|
||||
}
|
||||
|
||||
const fileStream = await fs.readFile(tempFile);
|
||||
if (fileStream) {
|
||||
const zip = new JSZip();
|
||||
const zipContents = await zip.loadAsync(fileStream);
|
||||
|
||||
if (!zipContents) return;
|
||||
|
||||
const fileNames = Object.keys(zipContents.files);
|
||||
|
||||
for (const fileName of fileNames) {
|
||||
const fileContent = await zip.file(fileName).async('nodebuffer');
|
||||
const dest = path.join(tempDir, fileName);
|
||||
await fs.writeFile(dest, fileContent);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function dumpData(tempDir, table, options) {
|
||||
const toTable = table.toTable;
|
||||
const baseName = table.fileName;
|
||||
|
||||
console.log(`Emptying table ${toTable}...`);
|
||||
const tableName = `edi.${toTable}`;
|
||||
await Self.rawSql(`DELETE FROM ??`, [tableName]);
|
||||
|
||||
const dirFiles = await fs.readdir(tempDir);
|
||||
const files = dirFiles.filter(file => file.startsWith(baseName));
|
||||
|
||||
for (const file of files) {
|
||||
console.log(`Dumping data from file ${file}...`);
|
||||
|
||||
const templatePath = path.join(__dirname, `./sql/${toTable}.sql`);
|
||||
const sqlTemplate = await fs.readFile(templatePath, 'utf8');
|
||||
const filePath = path.join(tempDir, file);
|
||||
|
||||
await Self.rawSql(sqlTemplate, [filePath], options);
|
||||
await Self.rawSql(`
|
||||
UPDATE edi.tableMultiConfig
|
||||
SET updated = ?
|
||||
WHERE fileName = ?
|
||||
`, [Date.vnNew(), baseName], options);
|
||||
}
|
||||
|
||||
console.log(`Updated table ${toTable}\n`);
|
||||
}
|
||||
};
|
|
@@ -47,8 +47,7 @@ module.exports = Self => {
            http: {
                path: `/:collection/:size/:id/download`,
                verb: 'GET'
            },
            accessScopes: ['DEFAULT', 'read:multimedia']
        }
    });

    Self.download = async function(ctx, collection, size, id) {

@@ -68,26 +67,23 @@ module.exports = Self => {

        if (!image) return false;

        const hasReadRole = await models.ImageCollection.hasReadRole(ctx, collection);
        const hasReadRole = models.ImageCollection.hasReadRole(ctx, collection);
        if (!hasReadRole)
            throw new UserError(`You don't have enough privileges`);

        const container = await models.ImageContainer.getContainer(collection);
        const rootPath = container.client.root;
        const fileSrc = path.join(rootPath, collection, size);

        const ext = image.name.substring((image.name.length - 4));
        const fileName = ext !== '.png' ? `${image.name}.png` : image.name;
        const file = {
            path: `${fileSrc}/${fileName}`,
            path: `${fileSrc}/${image.name}.png`,
            contentType: 'image/png',
            name: image.name
            name: `${image.name}.png`
        };

        if (!fs.existsSync(file.path)) return [];

        await fs.access(file.path);
        const stream = fs.createReadStream(file.path);
        return [stream, file.contentType, `filename="${fileName}"`];
        return [stream, file.contentType, `filename="${file.name}"`];
    };
};
@@ -1,130 +0,0 @@
const fs = require('fs-extra');
const path = require('path');
const UserError = require('vn-loopback/util/user-error');
const isProduction = require('vn-loopback/server/boot/isProduction');

module.exports = Self => {
    Self.remoteMethod('scrub', {
        description: 'Deletes images without database reference',
        accessType: 'WRITE',
        accepts: [
            {
                arg: 'collection',
                type: 'string',
                description: 'The collection name',
                required: true
            }, {
                arg: 'remove',
                type: 'boolean',
                description: 'Delete instead of move images to trash'
            }, {
                arg: 'limit',
                type: 'integer',
                description: 'Maximum number of images to clean'
            }, {
                arg: 'dryRun',
                type: 'boolean',
                description: 'Simulate actions'
            }, {
                arg: 'skipLock',
                type: 'boolean',
                description: 'Whether to skip exclusive lock'
            }
        ],
        returns: {
            type: 'integer',
            root: true
        },
        http: {
            path: `/scrub`,
            verb: 'POST'
        }
    });

    Self.scrub = async function(collection, remove, limit, dryRun, skipLock) {
        const $ = Self.app.models;

        dryRun = dryRun || !isProduction(false);

        const instance = await $.ImageCollection.findOne({
            fields: ['id'],
            where: {name: collection}
        });
        if (!instance)
            throw new UserError('Collection does not exist');

        const container = await $.ImageContainer.container(collection);
        const rootPath = container.client.root;

        let tx;
        let opts;
        const lockName = 'salix.Image.scrub';

        if (!skipLock) {
            tx = await Self.beginTransaction({timeout: null});
            opts = {transaction: tx};

            const [row] = await Self.rawSql(
                `SELECT GET_LOCK(?, 10) hasLock`, [lockName], opts);
            if (!row.hasLock)
                throw new UserError('Cannot obtain exclusive lock');
        }

        try {
            const now = Date.vnNew().toJSON();
            const scrubDir = path.join(rootPath, '.scrub', now);

            const collectionDir = path.join(rootPath, collection);
            const sizes = await fs.readdir(collectionDir);
            let cleanCount = 0;

            mainLoop: for (const size of sizes) {
                const sizeDir = path.join(collectionDir, size);
                const scrubSizeDir = path.join(scrubDir, collection, size);
                const images = await fs.readdir(sizeDir);
                for (const image of images) {
                    const imageName = path.parse(image).name;
                    const count = await Self.count({
                        collectionFk: collection,
                        name: imageName
                    }, opts);
                    const exists = count > 0;
                    let scrubDirCreated = false;
                    if (!exists) {
                        const srcFile = path.join(sizeDir, image);
                        if (remove !== true) {
                            if (!scrubDirCreated) {
                                if (!dryRun)
                                    await fs.mkdir(scrubSizeDir, {recursive: true});
                                scrubDirCreated = true;
                            }
                            const dstFile = path.join(scrubSizeDir, image);
                            if (!dryRun) await fs.rename(srcFile, dstFile);
                        } else {
                            try {
                                if (!dryRun) await fs.unlink(srcFile);
                            } catch (err) {
                                console.error(err.message);
                            }
                        }

                        cleanCount++;
                        if (limit && cleanCount == limit)
                            break mainLoop;
                    }
                }
            }

            return cleanCount;
        } finally {
            if (!skipLock) {
                try {
                    await Self.rawSql(`DO RELEASE_LOCK(?)`, [lockName], opts);
                    await tx.rollback();
                } catch (err) {
                    console.error(err.message);
                }
            }
        }
    };
};
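A minimal sketch of invoking the removed scrub method, assuming a booted vn-loopback server and the fixture collection name 'user' (both assumptions, not part of the diff):

    const {models} = require('vn-loopback/server/server');

    async function scrubUserImages() {
        // Dry run: reports how many orphan images would be cleaned without touching disk.
        // Arguments follow the accepts list above: collection, remove, limit, dryRun, skipLock.
        const cleaned = await models.Image.scrub('user', false, 100, true, false);
        console.log(`scrub would clean ${cleaned} images`);
    }

    scrubUserImages().catch(console.error);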
@@ -1,24 +1,23 @@
const {models} = require('vn-loopback/server/server');
const app = require('vn-loopback/server/server');

describe('image download()', () => {
    const collection = 'user';
    const size = '160x160';
    const employeeId = 1;
    const developerId = 9;
    const jessicaJonesId = 1110;
    const ctx = {req: {accessToken: {userId: employeeId}}};

    it('should return the image content-type of the user', async() => {
        const image = await models.Image.download(ctx, collection, size, developerId);
        const userId = 9;
        const image = await app.models.Image.download(ctx, collection, size, userId);
        const contentType = image[1];

        expect(contentType).toEqual('image/png');
    });

    it('should return the user profile picture', async() => {
        const image = await models.Image.download(ctx, collection, size, jessicaJonesId);
        const fileName = image[2];
    it(`should return false if the user doesn't have image`, async() => {
        const userId = 1110;
        const image = await app.models.Image.download(ctx, collection, size, userId);

        expect(fileName).toMatch('1110.png');
        expect(image).toBeFalse();
    });
});
@@ -1,4 +1,4 @@
const {models} = require('vn-loopback/server/server');
const app = require('vn-loopback/server/server');

describe('image upload()', () => {
    describe('as buyer', () => {

@@ -16,7 +16,7 @@ describe('image upload()', () => {

        let error;
        try {
            await models.Image.upload(ctx);
            await app.models.Image.upload(ctx);
        } catch (err) {
            error = err;
        }

@@ -25,7 +25,7 @@ describe('image upload()', () => {
        });

        it('should call to the TempContainer upload method for the collection "catalog"', async() => {
            const containerModel = models.TempContainer;
            const containerModel = app.models.TempContainer;
            spyOn(containerModel, 'upload');

            const ctx = {req: {accessToken: {userId: buyerId}},

@@ -36,7 +36,7 @@ describe('image upload()', () => {
            };

            try {
                await models.Image.upload(ctx);
                await app.models.Image.upload(ctx);
            } catch (err) { }

            expect(containerModel.upload).toHaveBeenCalled();

@@ -49,7 +49,7 @@ describe('image upload()', () => {
        const itemId = 4;

        it('should be able to call to the TempContainer upload method for the collection "user"', async() => {
            const containerModel = models.TempContainer;
            const containerModel = app.models.TempContainer;
            spyOn(containerModel, 'upload');

            const ctx = {req: {accessToken: {userId: marketingId}},

@@ -60,14 +60,14 @@ describe('image upload()', () => {
            };

            try {
                await models.Image.upload(ctx);
                await app.models.Image.upload(ctx);
            } catch (err) { }

            expect(containerModel.upload).toHaveBeenCalled();
        });

        it('should be able to call to the TempContainer upload method for the collection "catalog"', async() => {
            const containerModel = models.TempContainer;
            const containerModel = app.models.TempContainer;
            spyOn(containerModel, 'upload');

            const ctx = {req: {accessToken: {userId: marketingId}},

@@ -78,7 +78,7 @@ describe('image upload()', () => {
            };

            try {
                await models.Image.upload(ctx);
                await app.models.Image.upload(ctx);
            } catch (err) { }

            expect(containerModel.upload).toHaveBeenCalled();

@@ -91,22 +91,25 @@ describe('image upload()', () => {
        const itemId = 4;

        it('should upload a file for the collection "user" and call to the TempContainer upload method', async() => {
            const containerModel = models.TempContainer;
            const containerModel = app.models.TempContainer;
            spyOn(containerModel, 'upload');

            const ctx = {req: {accessToken: {userId: hhrrId}},
                args: {id: itemId, collection: 'user'}
                args: {
                    id: itemId,
                    collection: 'user'
                }
            };

            try {
                await models.Image.upload(ctx);
                await app.models.Image.upload(ctx);
            } catch (err) { }

            expect(containerModel.upload).toHaveBeenCalled();
        });

        it('should try to upload a file for the collection "catalog" and throw a privilege error', async() => {
            const ctx = {req: {accessToken: {userId: 1}},
            const ctx = {req: {accessToken: {userId: hhrrId}},
                args: {
                    id: workerId,
                    collection: 'catalog'

@@ -115,7 +118,7 @@ describe('image upload()', () => {

        let error;
        try {
            await models.Image.upload(ctx);
            await app.models.Image.upload(ctx);
        } catch (err) {
            error = err;
        }
@@ -1,7 +1,6 @@
const UserError = require('vn-loopback/util/user-error');
const fs = require('fs/promises');
const fs = require('fs-extra');
const path = require('path');
const isProduction = require('vn-loopback/server/boot/isProduction');

module.exports = Self => {
    Self.remoteMethodCtx('upload', {

@@ -13,13 +12,19 @@ module.exports = Self => {
            type: 'Number',
            description: 'The entity id',
            required: true
        }, {
        },
        {
            arg: 'collection',
            type: 'string',
            description: 'The collection name',
            required: true
        }
    ],
        },
        {
            arg: 'fileName',
            type: 'string',
            description: 'The file name',
            required: true
        }],
        returns: {
            type: 'Object',
            root: true

@@ -36,13 +41,13 @@ module.exports = Self => {
        const fileOptions = {};
        const args = ctx.args;

        let tempFilePath;
        let srcFile;
        try {
            const hasWriteRole = await models.ImageCollection.hasWriteRole(ctx, args.collection);
            if (!hasWriteRole)
                throw new UserError(`You don't have enough privileges`);

            if (!isProduction())
            if (process.env.NODE_ENV == 'test')
                throw new UserError(`Action not allowed on the test environment`);

            // Upload file to temporary path

@@ -51,22 +56,15 @@ module.exports = Self => {
            const [uploadedFile] = Object.values(uploaded.files).map(file => {
                return file[0];
            });

            const file = await TempContainer.getFile(tempContainer.name, uploadedFile.name);
            tempFilePath = path.join(file.client.root, file.container, file.name);
            srcFile = path.join(file.client.root, file.container, file.name);

            const fileName = `${args.id}.png`;
            await models.Image.registerImage(args.collection, srcFile, args.fileName, args.id);
        } catch (e) {
            if (fs.existsSync(srcFile))
                await fs.unlink(srcFile);

            await models.Image.resize({
                collectionName: args.collection,
                srcFile: tempFilePath,
                fileName: fileName,
                entityId: args.id
            });
        } finally {
            try {
                await fs.unlink(tempFilePath);
            } catch (error) { }
            throw e;
        }
    };
};
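A hedged sketch of exercising the upload endpoint over HTTP; the host, port, token and the /api/Images/upload route follow the usual loopback conventions and are assumptions, not part of the diff:

    const axios = require('axios');
    const FormData = require('form-data');
    const fs = require('fs');

    async function uploadUserImage(id, filePath, accessToken) {
        // Multipart body with the image file; query string carries the remote method args.
        const form = new FormData();
        form.append('file', fs.createReadStream(filePath));

        const params = `id=${id}&collection=user&fileName=${id}.png&access_token=${accessToken}`;
        await axios.post(`http://localhost:3000/api/Images/upload?${params}`, form, {
            headers: form.getHeaders()
        });
    }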
@@ -1,50 +0,0 @@
module.exports = Self => {
    Self.remoteMethodCtx('getVersion', {
        description: 'gets app version data',
        accessType: 'READ',
        accepts: [{
            arg: 'app',
            type: 'string',
            required: true
        }],
        returns: {
            type: ['object'],
            root: true
        },
        http: {
            path: `/getVersion`,
            verb: 'GET'
        }
    });

    Self.getVersion = async(ctx, app) => {
        const {models} = Self.app;
        const userId = ctx.req.accessToken.userId;

        const workerFk = await models.WorkerAppTester.findOne({
            where: {
                workerFk: userId
            }
        });
        let fields = ['id', 'appName'];

        if (workerFk)
            fields = fields.concat(['isVersionBetaCritical', 'versionBeta', 'urlBeta']);
        else
            fields = fields.concat(['isVersionCritical', 'version', 'urlProduction']);

        const filter = {
            where: {
                appName: app
            },
            fields,
        };

        const result = await Self.findOne(filter);
        return {
            isVersionCritical: result?.isVersionBetaCritical ?? result?.isVersionCritical,
            version: result?.versionBeta ?? result?.version,
            url: result?.urlBeta ?? result?.urlProduction
        };
    };
};
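A short usage sketch for the removed getVersion method, mirroring the spec that follows; only the userId is read from the context, and the fixture ids are assumptions:

    const {models} = require('vn-loopback/server/server');

    async function checkDeliveryVersion() {
        // Beta testers (WorkerAppTester rows) get the beta version/url, everyone else production.
        const ctx = {req: {accessToken: {userId: 9}}};
        const {isVersionCritical, version, url} =
            await models.MobileAppVersionControl.getVersion(ctx, 'delivery');
        console.log({isVersionCritical, version, url});
    }

    checkDeliveryVersion().catch(console.error);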
@@ -1,29 +0,0 @@
const {models} = require('vn-loopback/server/server');

describe('mobileAppVersionControl getVersion()', () => {
    const appName = 'delivery';
    const appNameVersion = '9.2';
    const appNameVersionBeta = '9.7';
    beforeAll(async() => {
        ctx = {
            req: {
                accessToken: {},
                headers: {origin: 'http://localhost'},
            }
        };
    });

    it('should get the version app', async() => {
        ctx.req.accessToken.userId = 9;
        const {version} = await models.MobileAppVersionControl.getVersion(ctx, appName);

        expect(version).toEqual(appNameVersion);
    });

    it('should get the beta version app', async() => {
        ctx.req.accessToken.userId = 66;
        const {version} = await models.MobileAppVersionControl.getVersion(ctx, appName);

        expect(version).toEqual(appNameVersionBeta);
    });
});
@@ -1,20 +0,0 @@
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope" xmlns:mrw="http://www.mrw.es/">
    <soap:Header>
        <mrw:AuthInfo>
            <mrw:CodigoFranquicia><%= mrw.franchiseCode %></mrw:CodigoFranquicia>
            <mrw:CodigoAbonado><%= clientType %></mrw:CodigoAbonado>
            <mrw:CodigoDepartamento/>
            <mrw:UserName><%= mrw.user %></mrw:UserName>
            <mrw:Password><%= mrw.password %></mrw:Password>
        </mrw:AuthInfo>
    </soap:Header>
    <soap:Body>
        <mrw:CancelarEnvio>
            <mrw:request>
                <mrw:CancelaEnvio>
                    <mrw:NumeroEnvioOriginal><%= externalId %></mrw:NumeroEnvioOriginal>
                </mrw:CancelaEnvio>
            </mrw:request>
        </mrw:CancelarEnvio>
    </soap:Body>
</soap:Envelope>
@@ -1,48 +0,0 @@
const axios = require('axios');
const fs = require('fs');
const ejs = require('ejs');
const {DOMParser} = require('xmldom');

module.exports = Self => {
    Self.remoteMethod('cancelShipment', {
        description: 'Cancel a shipment by providing the expedition ID, interacting with MRW WebService',
        accessType: 'WRITE',
        accepts: [{
            arg: 'expeditionFk',
            type: 'number',
            required: true
        }],
        returns: {
            type: 'boolean',
            root: true
        },
        http: {
            path: `/cancelShipment`,
            verb: 'POST'
        }
    });

    Self.cancelShipment = async expeditionFk => {
        const models = Self.app.models;

        const mrw = await models.MrwConfig.findOne();
        const {externalId} = await models.Expedition.findById(expeditionFk);
        const clientType = await models.MrwConfig.getClientType(expeditionFk);
        const template = fs.readFileSync(__dirname + '/cancelShipment.ejs', 'utf-8');
        const renderedXml = ejs.render(template, {mrw, externalId, clientType});
        const response = await axios.post(mrw.url, renderedXml, {
            headers: {
                'Content-Type': 'application/soap+xml; charset=utf-8'
            }
        });

        const xmlString = response.data;
        const parser = new DOMParser();
        const xmlDoc = parser.parseFromString(xmlString, 'text/xml');

        await Self.rawSql('CALL util.debugAdd(?,?);', ['cancelShipment', xmlDoc]);

        const result = xmlDoc.getElementsByTagName('Mensaje')[0].textContent;
        return result.toLowerCase().includes('se ha cancelado correctamente');
    };
};
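A hedged sketch of calling the removed cancelShipment method, assuming it is attached to the MrwConfig model as the sibling createShipment code suggests; the expedition id is illustrative:

    const {models} = require('vn-loopback/server/server');

    async function cancelExpedition(expeditionFk) {
        // Resolves to true when MRW answers that the shipment was cancelled correctly.
        const cancelled = await models.MrwConfig.cancelShipment(expeditionFk);
        console.log(`Expedition ${expeditionFk} cancelled:`, cancelled);
    }

    cancelExpedition(1).catch(console.error);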
@@ -1,51 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope" xmlns:mrw="http://www.mrw.es/">
    <soap:Header>
        <mrw:AuthInfo>
            <mrw:CodigoFranquicia><%= mrw.franchiseCode %></mrw:CodigoFranquicia>
            <mrw:CodigoAbonado><%= clientType %></mrw:CodigoAbonado>
            <mrw:CodigoDepartamento/>
            <mrw:UserName><%= mrw.user %></mrw:UserName>
            <mrw:Password><%= mrw.password %></mrw:Password>
        </mrw:AuthInfo>
    </soap:Header>
    <soap:Body>
        <mrw:TransmEnvio>
            <mrw:request>
                <mrw:DatosEntrega>
                    <mrw:Direccion>
                        <mrw:CodigoTipoVia/>
                        <mrw:Via><%= expeditionData.street %></mrw:Via>
                        <mrw:Numero/>
                        <mrw:Resto/>
                        <mrw:CodigoPostal><%= expeditionData.postalCode %></mrw:CodigoPostal>
                        <mrw:Poblacion><%= expeditionData.city %></mrw:Poblacion>
                        <mrw:Provincia/>
                        <mrw:CodigoPais/>
                    </mrw:Direccion>
                    <mrw:Nif><%= expeditionData.fi %></mrw:Nif>
                    <mrw:Nombre><%= expeditionData.clientName %></mrw:Nombre>
                    <mrw:Telefono><%= expeditionData.mobile %></mrw:Telefono>
                    <mrw:Observaciones><%= expeditionData.deliveryObservation %></mrw:Observaciones>
                </mrw:DatosEntrega>
                <mrw:DatosServicio>
                    <mrw:Fecha><%= expeditionData.created %></mrw:Fecha>
                    <mrw:Referencia><%= expeditionData.reference %></mrw:Referencia>
                    <mrw:CodigoServicio><%= expeditionData.serviceType %></mrw:CodigoServicio>
                    <mrw:NumeroBultos>1</mrw:NumeroBultos>
                    <mrw:EntregaSabado><%= expeditionData.weekDays %></mrw:EntregaSabado>
                    <mrw:Reembolso/>
                    <mrw:ImporteReembolso/>
                    <mrw:Bultos>
                        <mrw:BultoRequest>
                            <mrw:Alto><%= mrw.defaultHeight %></mrw:Alto>
                            <mrw:Largo><%= mrw.defaultLength %></mrw:Largo>
                            <mrw:Ancho><%= mrw.defaultWidth %></mrw:Ancho>
                            <mrw:Peso><%= mrw.defaultWeight %></mrw:Peso>
                        </mrw:BultoRequest>
                    </mrw:Bultos>
                </mrw:DatosServicio>
            </mrw:request>
        </mrw:TransmEnvio>
    </soap:Body>
</soap:Envelope>
@@ -1,91 +0,0 @@
const UserError = require('vn-loopback/util/user-error');

module.exports = Self => {
    Self.remoteMethod('createShipment', {
        description: 'Create an expedition and return a base64Binary label from the MRW WebService',
        accessType: 'WRITE',
        accepts: [{
            arg: 'expeditionFk',
            type: 'number',
            required: true
        }],
        returns: {
            type: ['object'],
            root: true
        },
        http: {
            path: `/createShipment`,
            verb: 'POST'
        }
    });

    Self.createShipment = async expeditionFk => {
        const models = Self.app.models;
        const mrw = await Self.getConfig();
        const clientType = await models.MrwConfig.getClientType(expeditionFk);

        const today = Date.vnNew();
        const [hours, minutes] = mrw?.expeditionDeadLine ? mrw.expeditionDeadLine.split(':').map(Number) : [0, 0];

        const deadLine = Date.vnNew();
        deadLine.setHours(hours, minutes, 0);

        if (today > deadLine && (!mrw.notified || mrw.notified.setHours(0, 0, 0, 0) !== today.setHours(0, 0, 0, 0))) {
            await models.NotificationQueue.create({notificationFk: 'mrw-deadline'});
            await mrw.updateAttributes({notified: Date.vnNow()});
        }

        const query =
            `SELECT
                CASE co.code
                    WHEN 'ES' THEN a.postalCode
                    WHEN 'PT' THEN LEFT(a.postalCode, mc.portugalPostCodeTrim)
                    WHEN 'AD' THEN REPLACE(a.postalCode, 'AD', '00')
                END postalCode,
                a.city,
                a.street,
                co.code countryCode,
                c.fi,
                c.name clientName,
                IFNULL(a.mobile, c.mobile) mobile,
                DATE_FORMAT(t.shipped, '%d/%m/%Y') created,
                t.shipped,
                CONCAT( e.ticketFk, LPAD(e.counter, mc.counterWidth, '0')) reference,
                LPAD(IF(mw.serviceType IS NULL, ms.serviceType, mw.serviceType), mc.serviceTypeWidth, '0') serviceType,
                IF(mw.weekdays, 'S', 'N') weekDays,
                ta.description deliveryObservation
            FROM expedition e
                JOIN ticket t ON e.ticketFk = t.id
                JOIN agencyMode am ON am.id = t.agencyModeFk
                JOIN mrwService ms ON ms.agencyModeCodeFk = am.code
                LEFT JOIN mrwServiceWeekday mw ON mw.agencyModeCodeFk = am.code
                    AND mw.weekDays & (1 << WEEKDAY(t.landed))
                JOIN client c ON t.clientFk = c.id
                JOIN address a ON t.addressFk = a.id
                LEFT JOIN ticketObservation ta ON ta.ticketFk = t.id
                    AND ta.observationTypeFk IN (SELECT id FROM observationType ot WHERE ot.code = 'agency')
                JOIN province p ON a.provinceFk = p.id
                JOIN country co ON co.id = p.countryFk
                JOIN mrwConfig mc
            WHERE e.id = ?
            LIMIT 1`;

        const [expeditionData] = await Self.rawSql(query, [expeditionFk]);

        if (expeditionData?.shipped.setHours(0, 0, 0, 0) < today.setHours(0, 0, 0, 0))
            throw new UserError(`This ticket has a shipped date earlier than today`);

        const shipmentResponse = await Self.sendXmlDoc(
            __dirname + `/createShipment.ejs`,
            {mrw, expeditionData, clientType},
            'application/soap+xml'
        );
        const shipmentId = Self.getTextByTag(shipmentResponse, 'NumeroEnvio');

        if (!shipmentId) throw new UserError(Self.getTextByTag(shipmentResponse, 'Mensaje'));

        const file = await models.MrwConfig.getLabel(shipmentId, clientType);

        return {shipmentId, file};
    };
};
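A minimal sketch of the removed createShipment flow from a caller's point of view; writing the returned base64 label to a PDF file is an assumption about the label format, not something the diff states:

    const {models} = require('vn-loopback/server/server');
    const fs = require('fs/promises');

    async function printMrwLabel(expeditionFk) {
        const {shipmentId, file} = await models.MrwConfig.createShipment(expeditionFk);
        // `file` is the base64Binary label fetched through getLabel(); assumed here to be a PDF.
        await fs.writeFile(`${shipmentId}.pdf`, Buffer.from(file, 'base64'));
        return shipmentId;
    }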
@@ -1,25 +0,0 @@
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:mrw="http://www.mrw.es/">
    <soapenv:Header>
        <mrw:AuthInfo>
            <mrw:CodigoFranquicia><%= mrw.franchiseCode %></mrw:CodigoFranquicia>
            <mrw:CodigoAbonado><%= clientType %></mrw:CodigoAbonado>
            <mrw:CodigoDepartamento/>
            <mrw:UserName><%= mrw.user %></mrw:UserName>
            <mrw:Password><%= mrw.password %></mrw:Password>
        </mrw:AuthInfo>
    </soapenv:Header>
    <soapenv:Body>
        <mrw:GetEtiquetaEnvio>
            <mrw:request>
                <mrw:NumeroEnvio><%= shipmentId %></mrw:NumeroEnvio>
                <mrw:NumerosEtiqueta>1</mrw:NumerosEtiqueta>
                <mrw:SeparadorNumerosEnvio></mrw:SeparadorNumerosEnvio>
                <mrw:FechaInicioEnvio></mrw:FechaInicioEnvio>
                <mrw:FechaFinEnvio></mrw:FechaFinEnvio>
                <mrw:TipoEtiquetaEnvio>0</mrw:TipoEtiquetaEnvio>
                <mrw:ReportTopMargin>0</mrw:ReportTopMargin>
                <mrw:ReportLeftMargin>0</mrw:ReportLeftMargin>
            </mrw:request>
        </mrw:GetEtiquetaEnvio>
    </soapenv:Body>
</soapenv:Envelope>
@@ -1,37 +0,0 @@
module.exports = Self => {
    Self.remoteMethod('getLabel', {
        description: 'Return a base64Binary label from the MRW WebService',
        accessType: 'READ',
        accepts: [{
            arg: 'shipmentId',
            type: 'string',
            required: true
        },
        {
            arg: 'clientType',
            type: 'string',
            required: true
        },
        ],
        returns: {
            type: 'string',
            root: true
        },
        http: {
            path: `/getLabel`,
            verb: 'GET'
        }
    });

    Self.getLabel = async(shipmentId, clientType) => {
        const mrw = await Self.getConfig();

        const getLabelResponse = await Self.sendXmlDoc(
            __dirname + `/getLabel.ejs`,
            {mrw, shipmentId, clientType},
            'text/xml'
        );

        return Self.getTextByTag(getLabelResponse, 'EtiquetaFile');
    };
};
@@ -1,159 +0,0 @@
const models = require('vn-loopback/server/server').models;
const axios = require('axios');
const fs = require('fs');

const filter = {notificationFk: 'mrw-deadline'};
const mockBase64Binary = 'base64BinaryString';
const ticket1 = {
    'id': '44',
    'clientFk': 1101,
    'shipped': Date.vnNew(),
    'nickname': 'MRW',
    'addressFk': 1,
    'agencyModeFk': 999
};
let expedition;
const expedition1 = {
    'agencyModeFk': 999,
    'ticketFk': 44,
    'freightItemFk': 71,
    'created': '2001-01-01',
    'counter': 1,
    'workerFk': 18,
    'packagingFk': '94',
    'hostFk': '',
    'stateTypeFk': 3,
    'hasNewRoute': 0,
    'isBox': 71,
    'editorFk': 100
};

describe('MRWConfig createShipment()', () => {
    beforeAll(async() => {
        await models.Agency.create(
            {'id': 999, 'name': 'mrw'}
        );

        await models.AgencyMode.create(
            {'id': 999, 'name': 'mrw', 'agencyFk': 999, 'code': 'mrw'}
        );

        await models.MrwService.create(
            {'agencyModeCodeFk': 'mrw', 'clientType': '000001', 'serviceType': 105, 'kg': 10}
        );

        await createMrwConfig();

        await models.Ticket.create(ticket1);
        expedition = await models.Expedition.create(expedition1);
    });

    afterAll(async() => {
        await cleanFixtures();
        await models.Ticket.destroyAll(ticket1);
        await models.Expedition.destroyAll(ticket1);
    });

    beforeEach(async() => {
        const mockPostResponses = [
            {data: fs.readFileSync(__dirname + '/mockGetLabel.xml', 'utf-8')},
            {data: fs.readFileSync(__dirname + '/mockCreateShipment.xml', 'utf-8')}
        ];

        spyOn(axios, 'post').and.callFake(() => Promise.resolve(mockPostResponses.pop()));
        await cleanFixtures();
    });

    async function cleanFixtures() {
        await models.NotificationQueue.destroyAll(filter);
        await models.MrwConfig.updateAll({id: 1}, {expeditionDeadLine: null, notified: null});
    }

    async function createMrwConfig() {
        await models.MrwConfig.create(
            {
                'id': 1,
                'url': 'https://url.com',
                'user': 'user',
                'password': 'password',
                'franchiseCode': 'franchiseCode',
                'subscriberCode': 'subscriberCode',
                'clientTypeWidth': 6
            }
        );
    }

    async function getLastNotification() {
        return models.NotificationQueue.findOne({
            order: 'id DESC',
            where: filter
        });
    }

    it('should create a shipment and return a base64Binary label', async() => {
        const {file} = await models.MrwConfig.createShipment(expedition.id);

        expect(file).toEqual(mockBase64Binary);
    });

    it('should fail if mrwConfig has no data', async() => {
        let error;
        await models.MrwConfig.destroyAll();
        await models.MrwConfig.createShipment(expedition.id).catch(e => {
            error = e;
        }).finally(async() => {
            expect(error.message).toEqual(`MRW service is not configured`);
        });
        await createMrwConfig();

        expect(error).toBeDefined();
    });

    it('should fail if expeditionFk is not a MrwExpedition', async() => {
        let error;
        await models.MrwConfig.createShipment(15).catch(e => {
            error = e;
        }).finally(async() => {
            expect(error.message).toEqual(`ClientType not available`);
        });
    });

    it('should fail if the creation date of this ticket is before the current date', async() => {
        let error;
        const yesterday = Date.vnNew();
        yesterday.setDate(yesterday.getDate() - 1);

        await models.Ticket.updateAll({id: ticket1.id}, {shipped: yesterday});
        await models.MrwConfig.createShipment(expedition.id).catch(e => {
            error = e;
        }).finally(async() => {
            expect(error.message).toEqual(`This ticket has a shipped date earlier than today`);
        });
        await models.Ticket.updateAll({id: ticket1.id}, {shipped: Date.vnNew()});
    });

    it('should send mail if you are past the dead line and is not notified today', async() => {
        await models.MrwConfig.updateAll({id: 1}, {expeditionDeadLine: '10:00:00', notified: null});
        await models.MrwConfig.createShipment(expedition.id);
        const notification = await getLastNotification();

        expect(notification.notificationFk).toEqual(filter.notificationFk);
    });

    it('should send mail if you are past the dead line and it is notified from another day', async() => {
        await models.MrwConfig.updateAll({id: 1}, {expeditionDeadLine: '10:00:00', notified: new Date()});
        await models.MrwConfig.createShipment(expedition.id);
        const notification = await getLastNotification();

        expect(notification.notificationFk).toEqual(filter.notificationFk);
    });

    it('should not send mail if you are past the dead line and it is notified', async() => {
        await models.MrwConfig.updateAll({id: 1}, {expeditionDeadLine: '10:00:00', notified: Date.vnNew()});
        await models.MrwConfig.createShipment(expedition.id);
        const notification = await getLastNotification();

        expect(notification).toEqual(null);
    });
});
Some files were not shown because too many files have changed in this diff.