Compare commits

377 Commits
master...dev

Author SHA1 Message Date
Alex Moreno 728e6e921a build: init version
gitea/salix/pipeline/head This commit looks good Details
2025-02-18 13:38:46 +01:00
Alex Moreno f316349ac2 Merge branch 'test' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-18 13:38:21 +01:00
Alex Moreno 7ec29d1972 Merge branch 'master' of https://gitea.verdnatura.es/verdnatura/salix into test
gitea/salix/pipeline/head This commit looks good Details
2025-02-18 13:38:10 +01:00
Alex Moreno 40492d6489 Merge pull request '8627-devToTest' (!3474) from 8627-devToTest into test
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3474
Reviewed-by: Guillermo Bonet <guillermo@verdnatura.es>
2025-02-18 12:33:29 +00:00
Jon Elias 0f0504e590 Merge pull request '#8555: Added new filter field' (!3434) from 8555-AddNewField into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3434
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-18 11:46:12 +00:00
Jon Elias 6118154ef9 Merge branch 'dev' into 8555-AddNewField
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-18 11:44:01 +00:00
Jon Elias 5a1c826f15 Merge pull request '#8606: Show correct text of duplicate entry error' (!3461) from 8606-FixZoneModule into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3461
Reviewed-by: Javier Segarra <jsegarra@verdnatura.es>
2025-02-18 11:29:46 +00:00
Jon Elias 16bb716abd Merge branch 'dev' into 8606-FixZoneModule
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-18 10:36:49 +00:00
Alex Moreno dc989cb075 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 8627-devToTest
gitea/salix/pipeline/pr-test This commit looks good Details
2025-02-18 10:35:00 +01:00
Jorge Penadés e29e5e28ed Merge pull request 'fix: refs #8388 update booking status in toUnbook method' (!3462) from 8388-fixUnbook into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3462
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-18 09:31:22 +00:00
Alex Moreno 2386874a96 fix: refs #8627 add claim in getSales
gitea/salix/pipeline/pr-test This commit looks good Details
2025-02-18 10:28:00 +01:00
Alex Moreno 06c123d7f2 fix: refs #8627 ticket isTaxDataChecked 2025-02-18 10:15:28 +01:00
Alex Moreno a8c03548d9 fix: refs #8627 update stateFk assignment to use code instead of id
gitea/salix/pipeline/pr-test This commit looks good Details
2025-02-18 09:51:08 +01:00
Alex Moreno 664ba03425 Merge branch 'test' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-18 09:26:30 +01:00
Alex Moreno ca8bc43262 Merge branch 'test' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-18 08:53:24 +01:00
Alex Moreno 669b7cd67f Merge branch 'test' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-18 08:45:19 +01:00
Juan Ferrer 5a5da0a863 fix: refs #6695 Code refactor
gitea/salix/pipeline/head This commit looks good Details
2025-02-17 19:46:14 +01:00
Juan Ferrer d6c8d96d52 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-17 19:40:14 +01:00
Juan Ferrer 17410202ad fix: refs #6695 Code refactor 2025-02-17 19:40:12 +01:00
PAU ROVIRA ROSALENY f687ad35fd Merge pull request 'feat: #8497 added availabled on travel module' (!3469) from 8497-travelAvailabled into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3469
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-17 15:11:50 +00:00
Juan Ferrer fbc185fe00 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-17 15:48:11 +01:00
Juan Ferrer 2d765913ff fix: refs #6695 Code refactor 2025-02-17 15:48:09 +01:00
Javier Segarra 7e2129c94f Merge pull request 'fix: getSuggestedTickets' (!3454) from fix_getSuggestedTickets_byFixtures into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3454
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-17 14:41:39 +00:00
Javier Segarra dbd298b7d8 Merge branch 'dev' into fix_getSuggestedTickets_byFixtures
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 14:39:01 +00:00
PAU ROVIRA ROSALENY 2bac7cbd09 Merge branch 'dev' into 8497-travelAvailabled
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 13:51:17 +00:00
PAU ROVIRA ROSALENY e7cc754f53 feat: refs #8497 added availabled on travel module 2025-02-17 14:50:22 +01:00
Alex Moreno e4c1ed1d5c Merge branch 'test' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-17 14:50:09 +01:00
Alex Moreno 97198591f9 build: add build-essential in back dockerfile
gitea/salix/pipeline/head This commit looks good Details
2025-02-17 14:01:41 +01:00
Pablo Natek 38fa5cb823 Merge pull request 'fix(item_getBalance): refs #8408 alias for shipped field' (!3468) from 8408-Disponible-por-zonas-y-horas into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3468
Reviewed-by: Pablo Natek <pablone@verdnatura.es>
2025-02-17 12:18:25 +00:00
Pako Natek 991ed813e3 Merge branch 'dev' into 8408-Disponible-por-zonas-y-horas
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 12:15:48 +00:00
Pako Natek bd1a4b35aa fix(item_getBalance): refs #8408 alias for shipped field
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 13:12:22 +01:00
Alex Moreno 2e36d9b116 Merge pull request 'feat: refs #6695 add Dockerfile for MariaDB and update Jenkinsfile for database build stage' (!3464) from 6695-save_builds_and_Db into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3464
Reviewed-by: Juan Ferrer <juan@verdnatura.es>
2025-02-17 12:04:23 +00:00
Juan Ferrer 9e564a951a Merge branch '6695-save_builds_and_Db' of https://gitea.verdnatura.es/verdnatura/salix into 6695-save_builds_and_Db
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 13:00:34 +01:00
Juan Ferrer 40ed75450d ci: refs #6695 Jenkinsfile code refactor 2025-02-17 13:00:32 +01:00
Alex Moreno 2d7c677f0a Merge branch 'dev' into 6695-save_builds_and_Db
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 11:57:55 +00:00
Juan Ferrer 19df0d3cb0 ci: refs #6695 Jenkinsfile debug code removed
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 12:55:48 +01:00
Juan Ferrer 939181bd79 ci: refs #6695 Jenkinsfile fixes
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 12:50:22 +01:00
Carlos Andrés 52c22608a5 Update db/routines/vn/procedures/productionControl.sql
gitea/salix/pipeline/head This commit looks good Details
2025-02-17 11:38:48 +00:00
Juan Ferrer b7342eace2 ci: refs #6695 Jenkinsfile fixes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-17 12:38:06 +01:00
Juan Ferrer 45e8a0bd8b ci: refs #6695 Jenkinsfile fixes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-17 12:34:00 +01:00
Juan Ferrer 2e4fb91d77 ci: refs #6695 Jenkinsfile fixes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-17 12:20:36 +01:00
Juan Ferrer 2d94c8df39 ci: refs #6695 Jenkinsfile fixes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-17 12:17:49 +01:00
Juan Ferrer 7e7ee1ff12 ci: refs #6695 Jenkinsfile fixes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-17 11:33:56 +01:00
Juan Ferrer 64eefa95f5 ci: refs #6695 Jenkinsfile fixes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-17 11:32:38 +01:00
Juan Ferrer 8b05bfb59f ci: refs #6695 Jenkinsfile fixes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-17 11:31:10 +01:00
Juan Ferrer 034c76c05f Merge branch '6695-save_builds_and_Db' of https://gitea.verdnatura.es/verdnatura/salix into 6695-save_builds_and_Db (refs #6695)
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-17 11:29:50 +01:00
Pablo Natek e0c2a497d2 Merge pull request 'fix: refs #6897 change user field to nickname in getStockBought method' (!3465) from 6897-fixE2eAndStockBought into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3465
Reviewed-by: Javier Segarra <jsegarra@verdnatura.es>
2025-02-17 10:29:27 +00:00
Juan Ferrer 8387dad3d8 ci: refs #6695 Docker build changes 2025-02-17 11:28:01 +01:00
Alex Moreno bfddbfaa22 fix: refs #6695 update Jenkinsfile and docker-compose.yml for environment variable usage
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 10:54:38 +01:00
Pablo Natek 3093f8aef8 fix: refs #6897 change user field to nickname in getStockBought method
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 10:50:21 +01:00
Alex Moreno 4aa56a71cb refactor: refs #6695 uncomment
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-17 08:56:55 +01:00
Alex Moreno 8ef13e3d9c feat: refs #6695 add Dockerfile for MariaDB and update Jenkinsfile for database build stage
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-17 08:34:04 +01:00
Alex Moreno b4bcfb2886 Merge branch 'test' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-17 07:55:13 +01:00
Jorge Penadés f9d1a8c0c2 fix: refs #8388 update booking status in toUnbook method
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-14 14:23:43 +01:00
Jon Elias c8abd7b2a1 feat: refs #8606 show correct text of duplicate entry error
gitea/salix/pipeline/pr-dev Build queued... Details
2025-02-14 14:16:01 +01:00
Guillermo Bonet c79867188a Merge branch 'test' into dev
gitea/salix/pipeline/head There was a failure building this commit Details
2025-02-14 12:36:24 +01:00
Carlos Andrés a874b96c5e revert a119d9f7fb
gitea/salix/pipeline/head There was a failure building this commit Details
revert feat: invoiceIn move deductible field from head to lines
2025-02-14 11:32:45 +00:00
Carlos Andrés a119d9f7fb feat: invoiceIn move deductible field from head to lines
gitea/salix/pipeline/head This commit looks good Details
2025-02-14 12:29:56 +01:00
Guillermo Bonet 309a287748 Merge branch 'test' into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-14 12:08:43 +01:00
Guillermo Bonet 28deadfbad fix: refs #8573 version
gitea/salix/pipeline/head This commit looks good Details
2025-02-14 10:37:08 +01:00
Javier Segarra 609060f100 Merge branch 'dev' into fix_getSuggestedTickets_byFixtures
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-14 08:45:27 +00:00
Alex Moreno 8110e88aa7 fix: vnUser, default false
gitea/salix/pipeline/head There was a failure building this commit Details
2025-02-14 08:29:55 +01:00
Ivan Mas 9000becf07 Merge pull request 'refactor: refs #8573 add fk to expedition.hostFk' (!3447) from 8573-addFkExpeditionHost into dev
gitea/salix/pipeline/head There was a failure building this commit Details
Reviewed-on: #3447
Reviewed-by: Guillermo Bonet <guillermo@verdnatura.es>
2025-02-14 07:06:21 +00:00
Ivan Mas 5a37bd332e Merge branch 'dev' into 8573-addFkExpeditionHost
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-14 06:55:47 +00:00
Alex Moreno 44ed6254c3 Merge branch 'test' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-14 07:02:45 +01:00
Javier Segarra 9dd3f7198c fix: getSuggestedTickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-13 22:56:43 +01:00
Ivan Mas 13a76e5c70 Merge branch 'dev' into 8573-addFkExpeditionHost
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-13 15:43:56 +00:00
Ivan Mas caa921020d refactor: refs #8573 update before alter table
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-13 16:41:29 +01:00
Pako Natek 86091571cd Merge pull request 'fix(item_getBalance): refs #8408 availabled field prevails over landed' (!3449) from 8408-Disponible-por-zonas-y-horas into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3449
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-13 12:39:29 +00:00
Pablo Natek 4b7c20075b Merge pull request 'feat: refs #6897 add search method and enhance ACL permissions for Entry model' (!3448) from 6897-addItemSearch into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3448
Reviewed-by: Alex Moreno <alexm@verdnatura.es>
2025-02-13 07:59:58 +00:00
Pako Natek 3a1849326b fix(item_getBalance): refs #8408 availabled field prevails over landed
gitea/salix/pipeline/pr-dev This commit looks good Details
Refs: #8408
2025-02-13 08:41:13 +01:00
Pako Natek d773aec0f5 Merge pull request 'feat(productionControl and collection_new): refs #8575 new itempackingtype a' (!3444) from 8575-itemPackingType-Altillo into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3444
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-13 06:45:37 +00:00
Pako Natek 317c152c66 Merge branch 'dev' into 8575-itemPackingType-Altillo
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-13 06:41:43 +00:00
Pablo Natek 514ddf1045 feat: refs #6897 add search method and enhance ACL permissions for Entry model
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-12 19:40:22 +01:00
Ivan Mas f12c47cdf8 Merge branch 'dev' into 8573-addFkExpeditionHost
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-12 16:11:53 +00:00
Ivan Mas 5404f895b2 refactor: refs #8573 add fk to expedition.hostFk
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-12 17:08:45 +01:00
Javier Segarra 0b8a54d057 Merge pull request 'Fix TicketNegative' (!3446) from fix_ticketNegative into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3446
Reviewed-by: Jon Elias <jon@verdnatura.es>
2025-02-12 15:43:59 +00:00
Jon Elias 307c8d92df Merge branch 'dev' into fix_ticketNegative
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-12 15:30:09 +00:00
Javier Segarra f631aa1314 fix: remoteMethodCtx
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-12 16:26:01 +01:00
Guillermo Bonet 8959eb21f6 Merge branch 'test' into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-12 14:37:34 +01:00
Pako Natek b95db2eff1 feat(productionControl and collection_new): refs #8575 new itempackingtype a
gitea/salix/pipeline/pr-dev This commit looks good Details
Refs: #8575
2025-02-12 13:45:07 +01:00
Javi Gallego 1c8ad94ab8 fix: update SQL fixture values and enhance getVideoList method with transaction handling
gitea/salix/pipeline/head This commit looks good Details
2025-02-12 09:51:56 +01:00
Pablo Natek 8d0fec4ffd Merge pull request 'feat: refs #6897 add EntryConfig model and enhance entry filtering with new parameters' (!3366) from 6897-refactorEntryBuyList into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3366
Reviewed-by: Alex Moreno <alexm@verdnatura.es>
2025-02-12 06:37:43 +00:00
Pablo Natek c0b1f3337c Merge branch 'dev' into 6897-refactorEntryBuyList
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-11 15:50:28 +00:00
Pablo Natek f347d9668f refactor: refs #6897 improve variable scope and query parameters in recalcEntryPrices.js
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-11 16:50:00 +01:00
Guillermo Bonet f682e3cfe6 Merge branch 'test' into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-11 13:26:18 +01:00
Pablo Natek 96248132a1 refactor: refs #6897 sql fixture data for improved readability and consistency
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-11 13:02:50 +01:00
Pablo Natek 454fbcb7ce Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6897-refactorEntryBuyList (refs #6897)
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-11 12:38:42 +01:00
Jon Elias 2e04a38c66 Merge branch 'dev' into 8555-AddNewField
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-11 11:24:17 +00:00
Jon Elias 02f51a244d feat: refs #8555 added new filter field
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-11 12:19:15 +01:00
Javier Segarra 5df24d0e70 Merge pull request '#6321 - Negative tickets' (!1945) from 6321_negative_tickets into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #1945
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-11 08:45:32 +00:00
Javier Segarra 095e561c82 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-11 08:43:21 +00:00
Pako Natek 0bd345b6de Merge pull request '8408-Disponible-por-zonas-y-horas' (!3432) from 8408-Disponible-por-zonas-y-horas into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3432
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-11 07:50:15 +00:00
Pako Natek b4fe620f2e Merge branch 'dev' into 8408-Disponible-por-zonas-y-horas
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-11 07:47:16 +00:00
Pako Natek 5a8f7b2c1a Merge branch '8408-Disponible-por-zonas-y-horas' of https://gitea.verdnatura.es/verdnatura/salix into 8408-Disponible-por-zonas-y-horas
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-11 08:46:26 +01:00
Pako Natek fbf56ff0cf fix(available_refresh): refs #8408 more availabled cases
refs#8408
2025-02-11 08:46:24 +01:00
Pablo Natek 5ab45831e7 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6897-refactorEntryBuyList 2025-02-11 08:19:31 +01:00
Pako Natek cdb91c06c2 Merge pull request '8408-Disponible-por-zonas-y-horas' (!3431) from 8408-Disponible-por-zonas-y-horas into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3431
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-11 07:16:38 +00:00
Pako Natek 1a92a00cce Merge branch 'dev' into 8408-Disponible-por-zonas-y-horas
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-11 07:12:25 +00:00
Pako Natek 5d674139fa Merge branch '8408-Disponible-por-zonas-y-horas' of https://gitea.verdnatura.es/verdnatura/salix into 8408-Disponible-por-zonas-y-horas
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-10 14:00:36 +01:00
Pako Natek ec5ef3d7f8 fix(item_getStock): refs #8408 field availabled used for itementryin selection
Refs: #8408
2025-02-10 14:00:34 +01:00
Pablo Natek 6110295cc2 fix: refs #6897 update entry_clone method to return newEntryId instead of result
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-10 11:40:54 +01:00
Pako Natek 4d5d38592f Merge pull request 'feat(catalog_calculate): refs #8408 new concept availabled' (!3425) from 8408-Disponible-por-zonas-y-horas into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3425
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-10 06:47:01 +00:00
Pako Natek f7f221e2bc Merge branch 'dev' into 8408-Disponible-por-zonas-y-horas
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-10 06:36:15 +00:00
Pako Natek 17749b0ced Merge branch '8408-Disponible-por-zonas-y-horas' of https://gitea.verdnatura.es/verdnatura/salix into 8408-Disponible-por-zonas-y-horas
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-10 07:34:19 +01:00
Pako Natek b60f251c56 fix(catalog_calculate): refs #8408 remove comments
refs #8408
2025-02-10 07:34:15 +01:00
Guillermo Bonet 0ecf1f281a Merge branch 'test' into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-10 07:27:04 +01:00
Pablo Natek 2f0cd27ed8 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6897-refactorEntryBuyList (refs #6897)
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-09 18:26:52 +01:00
Pablo Natek 6ea4e3096e feat: refs #6897 add maxLockTime parameter to entryConfig insert statement
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-09 18:24:39 +01:00
Javier Segarra e748c3ea68 feat: refs #6321 minor changes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-07 22:40:00 +01:00
Javier Segarra fce6b13d2d Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-07 16:19:58 +01:00
Javier Segarra 9bb273807d feat: refs #6321 i18n negativeReplaced 2025-02-07 16:18:26 +01:00
Guillermo Bonet 08140894a8 Merge branch 'dev' into 8408-Disponible-por-zonas-y-horas
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-07 14:06:28 +00:00
Guillermo Bonet d7a25b06bf Merge branch 'test' into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-07 15:05:41 +01:00
Pako Natek e22a21290b Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 8408-Disponible-por-zonas-y-horas 2025-02-07 14:37:20 +01:00
Pako Natek adf416a086 fix: refs #8408 hour time on catalog_calculate
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-07 14:29:50 +01:00
Guillermo Bonet ec3210a5c0 Merge branch 'test' into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-07 14:28:28 +01:00
Pako Natek ca39edd010 fix: refs #8408 test
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-07 14:04:17 +01:00
Jon Elias f04933a9c1 Merge pull request 'Fix[ItemFixedPrice]: Fixed item name filter' (!3427) from Hotfix-FixedPriceNameFilter into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3427
Reviewed-by: Alex Moreno <alexm@verdnatura.es>
2025-02-07 12:44:42 +00:00
Jon Elias 516f409ae5 Merge branch 'dev' into Hotfix-FixedPriceNameFilter
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-07 12:42:20 +00:00
Jon Elias d76db10e67 fix: fixed item name filter
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-07 13:39:53 +01:00
Pako Natek 466a0a58d9 fix(catalog_calculate): refs #8408 delete commented lines
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
refs#8408
2025-02-07 12:59:41 +01:00
Pako Natek 74583d899a Merge branch 'dev' into 8408-Disponible-por-zonas-y-horas
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-07 11:53:30 +00:00
Pako Natek ba323bb7bf fix(fixtures.before): refs #8408 rollback changes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
refs#8408
2025-02-07 12:53:15 +01:00
Guillermo Bonet 21d9369250 fix: refs #8172 Version fix
gitea/salix/pipeline/head This commit looks good Details
2025-02-07 11:04:10 +01:00
Pako Natek a4f3975340 feat(catalog_calculate): refs #8408 new concept availabled
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
Refs: #8408
2025-02-07 10:40:00 +01:00
Guillermo Bonet 1a274648a7 Merge pull request 'refactor: refs #8172 Created table parkingCoordinates' (!3320) from 8172-parkingCoordinates into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3320
Reviewed-by: Carlos Andrés <carlosap@verdnatura.es>
2025-02-07 09:38:55 +00:00
Guillermo Bonet 6fd1c35819 Merge branch 'dev' into 8172-parkingCoordinates
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-07 10:36:35 +01:00
Javier Segarra f4dbddbe15 fix: refs #6321 dates in fixtures.before
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-07 09:54:55 +01:00
Javier Segarra ef4d639499 Merge pull request '#7601 - Different agency for future tickets' (!3420) from 7601_futureAgency into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3420
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-07 08:32:47 +00:00
Javier Segarra 9322360979 fix: refs #6321 dates in fixtures.before
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-07 08:41:49 +01:00
Javier Segarra 47c3878a74 Merge branch 'dev' into 7601_futureAgency
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-07 07:06:54 +00:00
Javier Segarra e035a73e06 feat: refs #6321 i18n es negativeReplaced
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-07 07:57:10 +01:00
Javier Segarra d185530839 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-07 07:56:05 +01:00
Javier Segarra 9390c0efed test: refs #6321 getSimilar.spec.js 2025-02-07 07:53:00 +01:00
Robert Ferrús 860d86d19e Merge pull request '6702-missageSaleCloned' (!3404) from 6702-missageSaleCloned into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3404
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-07 06:51:05 +00:00
Javier Segarra c4e64db9b9 Merge branch '6321_negative_tickets' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-06 23:23:00 +01:00
Javier Segarra ba58746a03 fix: refs #6321 test 2025-02-06 23:22:58 +01:00
Javier Segarra 35bf9abcf4 Merge branch 'dev' into 7601_futureAgency
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-06 16:15:27 +00:00
Javier Segarra 8398a30e4f Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-06 14:58:55 +00:00
Javier Segarra 4e4d6c3b6a fix: refs #6321 fixtures
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-06 15:20:44 +01:00
Javier Segarra 9626b6c0ff feat: refs #6321 update itemLackDetail 2025-02-06 15:19:39 +01:00
Javier Segarra 338e833c0b feat: refs #6321 i18n 2025-02-06 15:19:23 +01:00
Javier Segarra 8170eafa36 feat: refs #6321 remove ticketConfig var 2025-02-06 15:19:07 +01:00
Robert Ferrús aa44d0e727 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6702-missageSaleCloned (refs #6702)
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-06 14:31:37 +01:00
Guillermo Bonet 9f302fac24 Merge branch 'test' into dev
gitea/salix/pipeline/head There was a failure building this commit Details
2025-02-06 13:47:36 +01:00
Javier Segarra ab0667f85c feat: retrieve requested fields
gitea/salix/pipeline/pr-dev Build queued... Details
2025-02-06 12:31:46 +00:00
Guillermo Bonet 110a5212c0 fix: refs #8535 deploy
gitea/salix/pipeline/head This commit looks good Details
2025-02-06 12:55:01 +01:00
Javier Segarra 4c786be3af Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-06 12:34:41 +01:00
Javier Segarra 4c7b8212da feat: refs #6321 changes 2025-02-06 12:27:59 +01:00
Javier Segarra 3dd64e4257 feat: refs #6321 sql lackDetail step3 2025-02-06 10:31:43 +01:00
Javier Segarra e736c95fb6 feat: refs #6321 sql lackDetail step2 2025-02-06 10:29:48 +01:00
Javier Segarra 1af01ad747 feat: refs #6321 sql lackDetail step1 2025-02-06 10:26:38 +01:00
Jorge Penadés 97be16b344 Merge pull request 'feat: refs #7119 add VehicleState model with data source configuration' (!3328) from 7119-createVehicle into dev
gitea/salix/pipeline/head There was a failure building this commit Details
gitea/salix/pipeline/pr-dev This commit looks good Details
Reviewed-on: #3328
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-06 09:24:09 +00:00
Jorge Penadés 9ba517b648 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-06 10:20:22 +01:00
Javier Segarra 24411f9af1 fix: refs #6321 revert change
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-06 08:17:03 +01:00
Javier Segarra b2cbded2dc feat: refs #6321 defaultAlertLevelCode
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-06 01:06:02 +01:00
Javier Segarra 1f6f7b9975 feat: refs #6321 updates requested
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-06 00:34:08 +01:00
Javier Segarra da90d43f7a Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-05 15:36:44 +01:00
Javier Segarra e02dcf23b7 feat: refs #6321 add columns ticketConfigs 2025-02-05 15:36:01 +01:00
Jose Antonio Tubau abf73f5705 Merge pull request 'feat: refs #8304 add ACL entry for WorkerDms with high privileges' (!3353) from 8304-workerChangesAndFixes into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3353
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-05 12:23:04 +00:00
Jose Antonio Tubau 0a25595ed5 Merge branch 'dev' into 8304-workerChangesAndFixes
gitea/salix/pipeline/pr-test This commit looks good Details
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-05 13:00:44 +01:00
Alex Moreno 1ee5af96a5 Merge branch 'test' of https://gitea.verdnatura.es/verdnatura/salix into dev
gitea/salix/pipeline/head This commit looks good Details
2025-02-05 11:16:32 +01:00
Alex Moreno f216166806 fix: refs #7943 better "relation": "business",
gitea/salix/pipeline/head This commit looks good Details
2025-02-05 10:16:14 +01:00
Pablo Natek 26faaad5b4 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6897-refactorEntryBuyList
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-05 08:03:48 +01:00
Jon Elias 502b48718c Merge pull request '#7965 - unifyProblems' (!2990) from 7965-unifyProblems into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #2990
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
Reviewed-by: Alex Moreno <alexm@verdnatura.es>
2025-02-05 06:51:54 +00:00
Pablo Natek e4cd30bc27 feat: refs #6897 add groupingMode and hasMinPrice parameters to getBuyList method
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-05 07:34:47 +01:00
Javier Segarra 55eb882754 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-04 23:42:47 +01:00
Javier Segarra 272c7c0289 perf: refs #6321 minor changes
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-04 23:42:16 +01:00
Jon Elias 55063e488c Merge pull request '#7127 modify days when adding lines to a claim' (!3195) from 7127-ModifyDaysToAddSales into dev
gitea/salix/pipeline/head This commit looks good Details
Reviewed-on: #3195
Reviewed-by: Javi Gallego <jgallego@verdnatura.es>
2025-02-04 13:45:20 +00:00
Alex Moreno da36016f5b build: init version
gitea/salix/pipeline/head This commit looks good Details
2025-02-04 14:43:31 +01:00
Javier Segarra 5d209314f6 feat: refs #6321 use Date.vnNew
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-04 14:09:48 +01:00
Jon Elias b3aaac2f52 Merge branch 'dev' into 7127-ModifyDaysToAddSales
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-04 13:00:07 +00:00
Javier Segarra 0111373471 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets (refs #6321)
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-04 14:00:00 +01:00
Jon Elias 5258e5ba2a refactor: refs #7127 modified checkAccessAcl instead of using VnRole
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-04 13:55:01 +01:00
Javier Segarra 550b0871f0 feat: refs #6321 changes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-04 13:45:59 +01:00
Javier Segarra 410f3e73dc Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets (refs #6321)
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-04 09:54:41 +01:00
Pablo Natek 13d9cac340 test: refs #6897 update expected results in item and tag filter tests
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-03 14:19:39 +01:00
Robert Ferrús 14a48bf4c6 Merge branch 'dev' into 6702-missageSaleCloned
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-03 12:37:12 +00:00
Pablo Natek 8f3bf46165 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6897-refactorEntryBuyList (refs #6897)
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-03 13:36:20 +01:00
Pablo Natek e7dd1f6a58 feat: refs #6897 add recalcEntryPrices method and enhance ACL permissions for entry operations
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-02-03 13:16:48 +01:00
Robert Ferrús d79c19d92a feat: refs #6702 delete line
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-03 12:42:38 +01:00
Robert Ferrús 0646d2e817 feat: refs #6702 changes request
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-03 12:41:51 +01:00
Robert Ferrús d98476b777 Merge branch 'dev' into 6702-missageSaleCloned
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-02-03 06:11:47 +00:00
Robert Ferrús 1a8ce32833 Merge branch 'dev' into 6702-missageSaleCloned
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-31 12:17:08 +00:00
Robert Ferrús 9d3a4257b7 feat: refs #6702 translations
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-31 12:53:48 +01:00
Robert Ferrús f484c3e653 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6702-missageSaleCloned 2025-01-31 11:59:00 +01:00
Javier Segarra 49c6df42a7 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-31 10:20:42 +00:00
Robert Ferrús 948bdbd2c6 feat: refs #6702 refs #6701 changes
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-31 08:19:39 +01:00
Robert Ferrús ce447b7de6 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6702-missageSaleCloned (refs #6702)
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-31 07:14:10 +01:00
Robert Ferrús 1e18e48f8c feat: refs #6702 es.json delete changes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-31 07:09:42 +01:00
Javier Segarra 767c891317 perf: refs #6321 remove comments
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-31 01:17:23 +01:00
Javier Segarra 2574e59c71 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-31 01:07:53 +01:00
Javier Segarra cd7add3497 feat: refs #6321 debug 2025-01-31 01:07:39 +01:00
Javier Segarra 7fdd3d1eb8 feat: refs #6321 fix methods 2025-01-31 01:07:28 +01:00
Javier Segarra 9791f3b935 fix: refs #6321 fixtures 2025-01-31 01:06:57 +01:00
Javier Segarra b8894ca67d feat: refs #6321 i18n replaceItem 2025-01-31 01:04:34 +01:00
Javier Segarra 75b4202a7b feat: refs #6321 remove origin 2025-01-31 01:04:14 +01:00
Jorge Penadés 77623b489b feat: refs #7119 add new vehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-30 10:55:36 +01:00
Jorge Penadés a0dc8f8104 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-30 10:26:40 +01:00
Javier Segarra dc6f93c241 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-30 08:54:04 +01:00
Javier Segarra 811feb9fee feat: refs #6321 tour 2025-01-29 23:46:19 +01:00
Jorge Penadés 2f364ebc76 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-29 16:40:37 +01:00
Jorge Penadés a463a8f4d8 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-29 14:49:07 +01:00
Javier Segarra ac053814e6 test: refs #6321 fixing test 2025-01-29 12:26:42 +01:00
Jorge Penadés 2b443266b2 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle (refs #7119)
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-29 11:51:21 +01:00
Javier Segarra a93e8b28db fix: refs #6321 getSimilar
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-28 23:37:56 +01:00
Jorge Penadés 051c6ffcbc refactor(vehicle filter): refs #7119 simplify search condition to use logical OR for ID and number plate
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-28 12:23:24 +01:00
Jorge Penadés 2eabfaaa8f Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-28 11:44:59 +01:00
Javier Segarra 10eef6d1b6 feat: refs #6321 updates
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-28 08:45:00 +01:00
Javier Segarra 1a0992da78 feat: refs #6321 changes 2025-01-27 12:04:18 +01:00
Pablo Natek ef5c2ab3a2 feat: refs #6897 add cloneEntry and deleteEntry methods with corresponding ACL permissions
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-27 08:11:41 +01:00
Javier Segarra bd54eacda1 feat: refs #6321 alternative alertLevel 2025-01-26 02:36:39 +01:00
Javier Segarra 36192c14ec Merge branch 'dev' into 6321_negative_tickets 2025-01-25 09:10:45 +01:00
Jorge Penadés fb653c1eca fix: refs #7119 rollback
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-24 16:42:10 +01:00
Jorge Penadés ec7bbd13b1 refactor: refs #7119 update vehicle filter logic and improve test cases
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-24 16:37:35 +01:00
Jorge Penadés 2346205072 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle (refs #7119)
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-24 16:14:09 +01:00
Jorge Penadés 3fba81b41e feat: refs #7119 add 'id' filter option and enhance bank policy model with dmsFk field
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-24 10:46:35 +01:00
Jorge Penadés f8e4561a59 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-24 09:37:10 +01:00
Jorge Penadés 3a449896cd feat: refs #7119 update vehicle permissions and enhance vehicle model with new fields
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-23 18:02:53 +01:00
Jose Antonio Tubau e22a472e97 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 8304-workerChangesAndFixes
gitea/salix/pipeline/pr-test There was a failure building this commit Details
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-23 15:06:03 +01:00
Javier Segarra c3361fd49b Merge branch 'dev' into 6321_negative_tickets 2025-01-23 14:48:28 +01:00
Jorge Penadés 3ea67675da feat: refs #7119 add VehicleType model and update vehicle permissions for delivery roles
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-23 10:53:53 +01:00
Jorge Penadés 0440582e2b Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-23 10:09:47 +01:00
Jose Antonio Tubau d3b22ce6c9 Merge branch '8304-workerChangesAndFixes' of https://gitea.verdnatura.es/verdnatura/salix into 8304-workerChangesAndFixes
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-22 16:17:38 +01:00
Jose Antonio Tubau e05f740c3d Merge branch 'dev' into 8304-workerChangesAndFixes 2025-01-22 16:17:35 +01:00
Jose Antonio Tubau 171296f2db Merge branch 'dev' into 8304-workerChangesAndFixes
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-22 11:43:14 +00:00
Jorge Penadés 49e08fbf45 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle 2025-01-22 12:42:25 +01:00
Javier Segarra 2a4bad5034 Merge branch 'dev' into 6321_negative_tickets 2025-01-21 23:43:07 +01:00
Jorge Penadés 2ae0097fa8 feat: refs #7119 add vehicle type management and delivery role permissions
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-21 15:48:20 +01:00
Jose Antonio Tubau 7f5224ebca feat: refs #8304 add ACL entries for Business and Worker models and update worker model relationships
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-21 14:45:13 +01:00
Jose Antonio Tubau b20dee8382 feat: refs #8304 add notes field to business model
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-21 12:28:34 +01:00
Jorge Penadés 55a0a5bc9a feat: refs #7119 add delivery role permissions
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-21 11:52:09 +01:00
Jorge Penadés e343e457ee Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-21 10:48:08 +01:00
Jorge Penadés 3d2a7cfb66 feat: refs #7119 update ACLs for vehicle management and refine vehicle filter logic
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-20 18:05:33 +01:00
Jorge Penadés 946d08e543 feat: refs #7119 update ACLs and remove unused models for vehicle management
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-20 17:29:18 +01:00
Javier Segarra 1560c48af2 feat: refs #6321 improve query 2025-01-20 14:32:26 +01:00
Jorge Penadés ecedc84164 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-20 14:28:30 +01:00
Pablo Natek f8a156b7ab feat: refs #6897 add EntryConfig model and enhance entry filtering with new parameters
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-17 08:11:18 +01:00
Carlos Andrés f79bc2a665 Merge branch 'dev' into 7965-unifyProblems
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-15 17:37:32 +00:00
Carlos Andrés 948ad3a752 Merge branch 'dev' into 7965-unifyProblems
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-15 10:16:25 +00:00
Javier Segarra 84dfdcb79a Merge branch 'dev' into 6321_negative_tickets 2025-01-14 12:43:25 +01:00
Jose Antonio Tubau 0e8d9137ed feat: refs #8304 add privilege check for WorkerDms filter method
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-10 13:48:03 +01:00
Jose Antonio Tubau a167e7fada feat: refs #8304 add ACL entry for WorkerDms with high privileges
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-10 13:42:29 +01:00
Carlos Andrés 2d8bda2b56 fix: refs #7965 UnifyProblems
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-09 15:46:04 +01:00
Carlos Andrés 09b4b2cf1b fix: refs #7965 UnifyProblems
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-09 11:52:30 +01:00
Carlos Andrés 638a8e344d fix: refs #7965 UnifyProblems
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-09 10:34:49 +01:00
Carlos Andrés 8f99b14510 fix: refs #7965 UnifyProblems
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-09 08:43:14 +01:00
Carlos Andrés 71fcce4ed7 Merge branch 'dev' into 7965-unifyProblems
gitea/salix/pipeline/pr-dev This commit looks good Details
2025-01-09 08:13:42 +01:00
Robert Ferrús eca2ff84ef feat: refs #6702 changes
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2025-01-02 07:49:49 +01:00
Jorge Penadés bc495ed51e feat: refs #7119 add VehicleNotes model and update vehicle filter SQL query
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-30 17:55:27 +01:00
Jorge Penadés 68e42206c8 feat: refs #7119 enhance vehicle filter method with additional parameters and improve SQL query structure
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-12-30 17:31:56 +01:00
Jorge Penadés e9b0b1b7a3 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-30 15:56:57 +01:00
Jorge Penadés 8a6482ada5 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-30 09:31:19 +01:00
Jorge Penadés 921edb238c feat: refs #7119 add Ppe model and establish relationships in Vehicle model
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-27 16:55:59 +01:00
Jorge Penadés 0de4ce9b3c feat: refs #7119 add BankPolicy and FuelType models
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-27 15:36:19 +01:00
Jorge Penadés 9731d13a9a feat: refs #7119 add updateAttributes ACL
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-27 12:21:46 +01:00
Jorge Penadés 4d7387af18 feat: refs #7119 add deleteById permission for deliveryBoss role in ACL for Vehicle model
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-26 17:52:36 +01:00
Jorge Penadés b01e4894c9 feat: refs #7119 remove vehicleStateFk argument from filter method and simplify search logic
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-26 17:02:37 +01:00
Jorge Penadés f8c1e2aacf feat: refs #7119 add search and filter capabilities to Vehicle model and update related SQL fixtures
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-26 16:36:50 +01:00
Jorge Penadés fffd095ab3 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7119-createVehicle
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-26 09:43:39 +01:00
Jorge Penadés 9aa790ecfd feat: refs #7119 add isKmTruckRate field to Vehicle model and filter
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-24 13:27:32 +01:00
Jorge Penadés 39fc196464 feat: refs #7119 add VehicleEvent and VehicleState models with associated methods and SQL fixtures
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-24 13:01:23 +01:00
Jorge Penadés 086b7aed4b feat: refs #7119 add VehicleState model with data source configuration
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-12-24 10:40:17 +01:00
Robert Ferrús 9b2fb7a430 feat: refs #6702 sale fix Self.ticketWeekly 2024-12-19 11:32:15 +01:00
Robert Ferrús 0652d11112 feat: refs #6702 updatePrice message 2024-12-19 10:29:50 +01:00
Guillermo Bonet a8cf01ca19 refactor: refs #8172 Removed column and row columns
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-12-18 14:36:59 +01:00
Guillermo Bonet 6300795e52 refactor: refs #8172 Created table parkingCoordinates
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-12-18 13:17:47 +01:00
Robert Ferrús 46c4f4786a feat: refs #6702 saleCloned 2024-12-18 12:03:48 +01:00
Javier Segarra 53298bd9ca Merge branch 'dev' into 6321_negative_tickets 2024-12-09 14:24:34 +01:00
Jon Elias 8fedd78938 fix: refs #7965 filter and sales back tests
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-11-29 12:47:24 +01:00
Jon Elias 568661021a feat: refs #7965 added #6242 back to unify PR
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-11-29 11:54:53 +01:00
Jon Elias 632c1b2fc7 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7965-unifyProblems (refs #7965)
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-11-29 11:51:21 +01:00
Jon Elias 9113f2e3e5 feat: refs #7127 modify days when adding lines to a claim
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-11-13 08:30:09 +01:00
Carlos Andrés 4e0a81464c fix: test
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-10-02 15:12:57 +02:00
Carlos Andrés 99c70a533a fix: test
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-10-02 14:25:27 +02:00
Carlos Andrés 5f230ff122 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 7965-unifyProblems 2024-10-02 14:23:53 +02:00
Carlos Andrés 6a5cf1a57f fix: refs #7965 UnifyProblems
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-10-01 15:46:51 +02:00
Carlos Andrés bb1695eac6 fix: refs #7965 UnifyProblems
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-09-30 16:39:55 +02:00
Carlos Andrés af4d1ad513 Merge branch 'dev' into 7965-unifyProblems
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-09-30 14:31:48 +02:00
Carlos Andrés 69cafd5a04 fix: refs #7965 UnifyProblems
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-09-30 14:30:15 +02:00
Javier Segarra 62dd5cb675 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-09-24 22:12:21 +02:00
Javier Segarra c6490f6740 feat(salix): refs #6321 #6321 fixtures.before 2024-09-24 22:12:11 +02:00
Javier Segarra c876022fe5 feat(salix): refs #6321 #6321 TODO 2024-09-24 13:54:32 +02:00
Javier Segarra 2cb57225ff perf(salix): refs #6321 #7677 itemLackDetail 2024-09-21 00:26:55 +02:00
Carlos Andrés 712bfe7368 fix: refs #7965 UnifyProblems
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-09-19 20:15:38 +02:00
Javier Segarra 5e38d18fed Merge branch 'dev' into 6321_negative_tickets 2024-09-19 09:34:55 +02:00
Javier Segarra a93dd79fe2 Merge branch 'dev' into 6321_negative_tickets 2024-09-19 00:00:42 +02:00
Javier Segarra 36297009e1 perf(salix): refs #6321 #7677 itemLackDetail 2024-09-17 16:43:09 +02:00
Carlos Andrés 750b83bb45 fix: refs #7760 tmp.ticketIPT
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-09-16 19:03:12 +02:00
Carlos Andrés 91fac11fa0 fix: refs #7760 tmp.ticketIPT 2024-09-16 18:21:12 +02:00
Javier Segarra 03fcabd7f6 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-09-16 09:58:20 +02:00
Javier Segarra e76e2a15f2 feat(salix): refs #6321 #6321 TODO 2024-09-13 09:44:09 +02:00
Javier Segarra 09a7918ab3 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets 2024-09-13 08:51:28 +02:00
Javier Segarra fb851c3bdd feat: refs #6321 implement VnTable 2024-09-12 13:33:22 +02:00
Javier Segarra 94f99ccee1 fix(salix): refs #6321 #6321 remove ticketMethod clone 2024-09-11 11:58:58 +02:00
Javier Segarra 844e96583b Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets (refs #6321) 2024-09-11 08:45:42 +02:00
Javier Segarra f77163102c Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-07-23 10:43:12 +02:00
Javier Segarra b2d58a1d6f Merge branch 'dev' into 6321_negative_tickets 2024-07-22 17:30:25 +02:00
Javier Segarra 0e97c453ed Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-07-22 16:47:05 +02:00
Javier Segarra 7ec47f2f80 Merge branch 'dev' into 6321_negative_tickets 2024-07-22 10:21:50 +02:00
Javier Segarra c9c9d5973d test(salix): refs #6321 #6321 fix test
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-07-20 00:36:59 +02:00
Javier Segarra 7e8c2eebe5 feat: refs #6321 restore fixtures.before.sql 2024-07-19 19:38:21 +02:00
Javier Segarra 212f84aa9b revert commit 2024-07-19 11:41:33 +02:00
Javier Segarra 48b8bda49a Merge branch 'dev' into 6321_negative_tickets 2024-07-19 09:42:42 +02:00
Javier Segarra ac7c28cd27 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-07-18 22:02:22 +02:00
Javier Segarra 99efdffe58 feat(salix): refs #6321 #6321 retrieve observationType 2024-07-04 09:39:06 +02:00
Javier Segarra 8b72b7211e feat(salix): refs #7380 #7380 new typeObservation
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-07-03 23:06:13 +02:00
Javier Segarra 14e14eea2a feat(salix): refs #7380 #7380 client.substitutionAllowed new field 2024-07-03 23:06:06 +02:00
Javier Segarra b5ea2f12ff Merge remote-tracking branch 'origin/dev' into 6321_negative_tickets 2024-07-03 23:05:42 +02:00
Javier Segarra 37de252e15 Merge branch 'dev' into 6321_negative_tickets 2024-07-02 12:22:35 +02:00
Javier Segarra a1c48974c9 Merge branch 'dev' into 6321_negative_tickets 2024-06-20 12:21:24 +02:00
Javier Segarra e87c8ee5a7 feat(Salix): refs #6321 #6427 change url endpoint 2024-06-18 13:17:29 +02:00
Javier Segarra dba76a4f6b test(Salix): refs #6321 #6321 add default items as Proposal 2024-06-17 12:38:36 +02:00
Javier Segarra e45ac6424c perf(salix): refs #6321 #6321 updates 2024-06-14 11:43:06 +02:00
Javier Segarra ab85b8e703 Merge branch 'dev' into 6321_negative_tickets 2024-06-12 22:37:14 +02:00
Javier Segarra 2cbd610bc2 perf(salix): refs #6321 #7563 add ink.showOrder to procedure 2024-06-12 22:19:59 +02:00
Javier Segarra 02bc3afcda Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-06-11 13:53:30 +02:00
Javier Segarra 64a4a78308 feat(salix): refs #6321 updates 2024-06-10 17:09:25 +02:00
Javier Segarra 149aeac54e Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-06-04 09:26:15 +02:00
Javier Segarra 7468f87808 feat(salix): refs #6321 #6321 improve split method
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-05-30 07:46:10 +02:00
Javier Segarra aab7a7ec73 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-05-28 12:45:50 +02:00
Javier Segarra 4fe1d80e7c feat(salix): refs #6321 default value when days is not present 2024-05-24 14:00:41 +02:00
Javier Segarra 8366cfa348 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-05-24 11:12:52 +02:00
Javier Segarra 70f245fd2d Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-05-15 16:25:44 +02:00
Javier Segarra 3401f0d745 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-05-15 08:55:10 +02:00
Javier Segarra befc128950 feat(salix): refs #6321 Sale_itemReplace 2024-05-15 08:36:13 +02:00
Javier Segarra 5c0b25bb30 Merge branch '6321_negative_tickets' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-05-13 13:51:20 +02:00
Javier Segarra 888f15049a feat(salix): refs #6321 #6321 New arg 2024-05-13 13:51:04 +02:00
Jorge Penadés cfea648103 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-05-13 13:25:52 +02:00
Javier Segarra e30c66313f Merge branch 'dev' into 6321_negative_tickets 2024-05-08 12:29:06 +02:00
Javier Segarra de7469419a feat(salix): refs #6321 #6321 getSimilar minor update 2024-05-03 07:23:12 +02:00
Javier Segarra 7caea44427 feat(salix): refs #6321 #6321 getSimilar
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-05-02 13:53:28 +02:00
Javier Segarra 63d07cb082 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-05-02 12:47:59 +02:00
Javier Segarra cb76075bf8 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-04-29 11:55:39 +02:00
Javier Segarra d638e31a1a Merge branch 'dev' into 6321_negative_tickets 2024-04-23 19:15:45 +02:00
Javier Segarra caaa4fdd30 Merge remote-tracking branch 'origin/dev' into 6321_negative_tickets 2024-04-23 11:45:59 +02:00
Javier Segarra 68158f341d feat(salix): refs #6321 #6331 publish negativeOrigin model 2024-04-22 14:09:27 +02:00
Javier Segarra 5a5032f6e6 Merge remote-tracking branch 'origin/dev' into 6321_negative_tickets 2024-04-22 13:34:06 +02:00
Javier Segarra 41f0b6aa93 Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-04-16 05:13:35 +00:00
Javier Segarra e0712645a2 refs #6321 test: fix
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-04-08 12:17:32 +02:00
Javier Segarra 3dd162b683 refs #6321 test: fix
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-04-08 12:16:07 +02:00
Javier Segarra 25fc39ef2b refs #6321 perf: change descriptions
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-04-08 11:33:32 +02:00
Javier Segarra 134c468589 Merge branch 'dev' into 6321_negative_tickets 2024-04-08 11:30:48 +02:00
Javier Segarra c4f8734d44 refs #6321 fix: param
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-04-03 09:48:56 +02:00
Javier Segarra cc3f2da639 refs #6321 perf: minor change
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-04-02 15:02:33 +02:00
Javier Segarra 586f37afd2 refs #6321 perf: add arguments into procedure 2024-04-02 13:28:26 +02:00
Juanjo Breso 9a80f8c2ce minor change 2024-04-02 10:45:29 +02:00
Javier Segarra d08535ac18 refs #6321 minor changes
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-04-02 08:04:27 +02:00
Javier Segarra 5d24844256 refs #6321 test: debug use TIMEOUT
gitea/salix/pipeline/pr-dev This commit looks good Details
2024-04-01 16:12:52 +02:00
Javier Segarra 601f5db080 refs #6321 test: split 2024-04-01 16:11:30 +02:00
Javier Segarra 59498179ec refs #6321 test: itemLackDetail 2024-04-01 14:05:45 +02:00
Javier Segarra d225821a41 refs #6321 test: itemLack
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-04-01 13:59:35 +02:00
Javier Segarra d62c55dc9f refs #6321 test: negativeOrigin 2024-04-01 13:11:58 +02:00
Javier Segarra 2cff160c6a Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-03-28 23:50:51 +00:00
Javier Segarra 65a6174e2b refs #6321 updates
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-03-28 12:01:06 +01:00
Javier Segarra e6fe245b27 refs #6321 feat: new split method
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-03-27 14:09:25 +01:00
Javier Segarra d8d0ced918 Merge branch 'dev' into 6321_negative_tickets 2024-03-27 09:54:02 +01:00
Javier Segarra a943e39ba7 refs #6321 feat: negativeOrigin 2024-03-22 22:44:37 +01:00
Javier Segarra e085bc7f1e Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-03-21 07:47:39 +01:00
Javier Segarra 44c4e6a16e Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-03-15 09:47:10 +01:00
Javier Segarra 6c0706cc56 refs #6321 perf: query to retrieve results 2024-03-15 09:33:06 +01:00
Javier Segarra f83f7808c8 refs #6321 feat: negativeOrigin method 2024-03-15 09:32:36 +01:00
Javier Segarra 6a12af2eb9 refs #6321 feat: add producerFk
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-03-14 15:26:30 +01:00
Javier Segarra ed6b25455b refs #5858 feat: improve itemLackDetail 2024-03-13 14:27:45 +01:00
Javier Segarra c8446eb9a1 refs #6321 perf: update method
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-03-13 09:20:32 +01:00
Javier Segarra d1e7e13333 refs #6321 feat: acl
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-03-06 14:37:28 +01:00
Javier Segarra 7c8fa52da0 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets 2024-03-06 14:30:02 +01:00
Javier Segarra 871447cc6e refs #6321 feat: updates
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-03-05 08:07:54 +01:00
Javier Segarra bc09ad7da7 Merge branch 'dev' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets 2024-01-29 11:07:31 +01:00
Javier Segarra 1a21dda00b refs #6321 feat itemLackDetail
gitea/salix/pipeline/head There was a failure building this commit Details
gitea/salix/pipeline/pr-dev There was a failure building this commit Details
2024-01-29 09:55:44 +01:00
Javier Segarra 895d9bff64 refs #6321 feat itemLack with SQL 2024-01-29 09:55:34 +01:00
Javier Segarra f49444c19f Merge branch '6321_negative_tickets' of https://gitea.verdnatura.es/verdnatura/salix into 6321_negative_tickets
gitea/salix/pipeline/head There was a failure building this commit Details
2024-01-23 09:42:59 +01:00
Javier Segarra 48d9a3934a Merge branch 'dev' into 6321_negative_tickets
gitea/salix/pipeline/head This commit looks good Details
2024-01-22 19:29:47 +00:00
Javier Segarra 2bcb6366b2 refs #6321 feat: vCustomWhere 2024-01-22 10:10:38 +01:00
Javier Segarra 0111aa1b75 refs #6321 feat: fixtures and update procedure 2024-01-22 09:56:10 +01:00
Javier Segarra 91f5ee3b93 refs #6321 feat: new remoteMethod
gitea/salix/pipeline/head This commit looks good Details
2024-01-20 12:29:41 +01:00
106 changed files with 5147 additions and 2776 deletions

137
Jenkinsfile vendored
View File

@ -8,6 +8,7 @@ def RUN_BUILD
def BRANCH_ENV = [
test: 'test',
master: 'production',
main: 'production',
beta: 'production'
]
@ -20,12 +21,14 @@ node {
'dev',
'test',
'master',
'main',
'beta'
].contains(env.BRANCH_NAME)
FROM_GIT = env.JOB_NAME.startsWith('gitea/')
RUN_TESTS = !PROTECTED_BRANCH && FROM_GIT
RUN_BUILD = PROTECTED_BRANCH && FROM_GIT
IS_LATEST = ['master', 'main'].contains(env.BRANCH_NAME)
// https://www.jenkins.io/doc/book/pipeline/jenkinsfile/#using-environment-variables
echo "NODE_NAME: ${env.NODE_NAME}"
@ -73,6 +76,7 @@ pipeline {
def packageJson = readJSON file: 'package.json'
def version = "${packageJson.version}-build${env.BUILD_ID}"
writeFile(file: 'VERSION.txt', text: version)
echo "VERSION: ${version}"
}
}
}
@ -105,93 +109,72 @@ pipeline {
}
}
}
stage('Stack') {
stage('Test') {
when {
expression { RUN_TESTS }
}
environment {
NODE_ENV = ''
}
parallel {
stage('Back') {
stages {
stage('Test') {
when {
expression { RUN_TESTS }
}
environment {
NODE_ENV = ''
}
steps {
sh 'node back/tests.js --junit'
}
post {
always {
junit(
testResults: 'junitresults.xml',
allowEmptyResults: true
)
}
}
}
stage('Build') {
when {
expression { RUN_BUILD }
}
environment {
VERSION = readFile 'VERSION.txt'
}
steps {
sh 'docker-compose build back'
}
steps {
sh 'node back/tests.js --junit'
}
post {
always {
junit(
testResults: 'junitresults.xml',
allowEmptyResults: true
)
}
}
}
stage('Front') {
when {
expression { FROM_GIT }
steps {
sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=10'
}
stages {
stage('Test') {
when {
expression { RUN_TESTS }
}
environment {
NODE_ENV = ''
}
steps {
sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=10'
}
post {
always {
junit(
testResults: 'junit.xml',
allowEmptyResults: true
)
}
}
}
stage('Build') {
when {
expression { RUN_BUILD }
}
environment {
VERSION = readFile 'VERSION.txt'
}
steps {
sh 'gulp build'
sh 'docker-compose build front'
}
post {
always {
junit(
testResults: 'junit.xml',
allowEmptyResults: true
)
}
}
}
}
}
stage('Push') {
stage('Build') {
when {
expression { RUN_BUILD }
}
environment {
CREDENTIALS = credentials('docker-registry')
VERSION = readFile 'VERSION.txt'
CREDENTIALS = credentials('docker-registry')
}
steps {
sh 'docker login --username $CREDENTIALS_USR --password $CREDENTIALS_PSW $REGISTRY'
sh 'docker-compose push'
parallel {
stage('Back') {
steps {
dockerBuild 'salix-back', '.', 'back/Dockerfile'
}
}
stage('Front') {
steps {
sh 'gulp build'
dockerBuild 'salix-front', 'front'
}
}
stage('DB') {
steps {
sh 'npx myt run -t'
sh 'docker exec vn-database sh -c "rm -rf /mysql-template"'
sh 'docker exec vn-database sh -c "cp -a /var/lib/mysql /mysql-template"'
sh 'docker commit vn-database salix-db:$VERSION'
sh 'docker rm -f vn-database'
dockerPush docker.image("salix-db:${VERSION}")
}
}
}
}
stage('Deploy') {
@ -264,3 +247,19 @@ pipeline {
}
}
}
def dockerBuild(imageName, context, dockerfile = null) {
if (dockerfile == null)
dockerfile = "${context}/Dockerfile"
def baseImage = "${imageName}:${env.VERSION}"
def image = docker.build(baseImage, "-f ${dockerfile} ${context}")
dockerPush(image)
}
def dockerPush(image) {
docker.withRegistry("https://${env.REGISTRY}", 'docker-registry') {
image.push()
image.push(env.BRANCH_NAME)
if (IS_LATEST) image.push('latest')
}
}

View File

@ -25,7 +25,7 @@ RUN apt-get update \
libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 \
libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 \
libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 \
libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 \
libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 build-essential \
fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget
# Extra dependencies

View File

@ -54,7 +54,8 @@
"type": "string"
},
"hasGrant": {
"type": "boolean"
"type": "boolean",
"default": false
},
"passExpired": {
"type": "date"
@ -168,6 +169,7 @@
"emailVerified",
"twoFactor"
]
}
}
}

View File

@ -77,8 +77,8 @@ INSERT INTO `vn`.`agency` (`name`, `warehouseFk`, `isOwn`, `isAnyVolumeAllowed`)
('Otra agencia ', '1', '0', '0');
INSERT INTO `vn`.`expedition` (`agencyModeFk`, `ticketFk`, `isBox`, `counter`, `workerFk`, `externalId`, `packagingFk`, `hostFk`, `itemPackingTypeFk`, `hasNewRoute`) VALUES
('1', '1', 1, '1', '1', '1', '1', 'pc00', 'F', 0),
('1', '1', 1, '2', '1', '1', '1', 'pc00', 'F', 0);
('1', '1', 1, '1', '1', '1', '1', 'pc1', 'F', 0),
('1', '1', 1, '2', '1', '1', '1', 'pc1', 'F', 0);
INSERT INTO vn.client (id,name,defaultAddressFk,street,fi,email,dueDay,isTaxDataChecked,accountingAccount,city,provinceFk,postcode,socialName,contact,credit,countryFk,quality,riskCalculated) VALUES
(100,'root',110,'Valle de la muerte','74974747G','root@mydomain.com',0,1,'4300000078','ALGEMESI',1,'46680','rootSocial','rootContact',500.0,1,10,'2025-01-01');

File diff suppressed because it is too large.

View File

@ -3,7 +3,7 @@ CREATE OR REPLACE DEFINER=`root`@`localhost` PROCEDURE `cache`.`available_refres
OUT `vCalc` INT,
`vRefresh` INT,
`vWarehouse` INT,
`vDated` DATE
`vAvailabled` DATETIME
)
proc: BEGIN
DECLARE vStartDate DATE;
@ -12,6 +12,7 @@ proc: BEGIN
DECLARE vInventoryDate DATE;
DECLARE vLifeScope DATE;
DECLARE vWarehouseFkInventory INT;
DECLARE vDated DATE;
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
@ -19,13 +20,17 @@ proc: BEGIN
RESIGNAL;
END;
IF vDated < util.VN_CURDATE() THEN
IF vAvailabled < util.VN_CURDATE() THEN
LEAVE proc;
END IF;
SET vDated = DATE(vAvailabled);
SET vAvailabled = vDated + INTERVAL HOUR(vAvailabled) HOUR;
CALL vn.item_getStock(vWarehouse, vDated, NULL);
SET vParams = CONCAT_WS('/', vWarehouse, vDated);
SET vParams = CONCAT_WS('/', vWarehouse, vAvailabled);
CALL cache_calc_start (vCalc, vRefresh, 'available', vParams);
IF !vRefresh THEN
@ -84,14 +89,13 @@ proc: BEGIN
AND (ir.ended IS NULL OR i.shipped <= ir.ended)
AND i.warehouseFk = vWarehouse
UNION ALL
SELECT i.itemFk, i.landed, i.quantity
SELECT i.itemFk, IFNULL(i.availabled, i.landed), i.quantity
FROM vn.itemEntryIn i
JOIN itemRange ir ON ir.itemFk = i.itemFk
LEFT JOIN edi.warehouseFloramondo wf ON wf.entryFk = i.entryFk
WHERE i.landed >= vStartDate
AND (ir.ended IS NULL OR i.landed <= ir.ended)
WHERE IFNULL(i.availabled, i.landed) >= vStartDate
AND IFNULL(i.availabled, i.landed) <= vAvailabled
AND (ir.ended IS NULL OR IFNULL(i.availabled, i.landed) <= ir.ended)
AND i.warehouseInFk = vWarehouse
AND ISNULL(wf.entryFk)
UNION ALL
SELECT i.itemFk, i.shipped, i.quantity
FROM vn.itemEntryOut i
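Note: the procedure's last parameter changes from a DATE (vDated) to a DATETIME (vAvailabled), so callers now pass a cut-off that carries an hour; the date part is derived internally. A minimal sketch of a call under the new signature, with a hypothetical warehouse id and hour:

    CALL cache.available_refresh(@vCalc, FALSE, 1, util.VN_CURDATE() + INTERVAL 16 HOUR);
    SELECT @vCalc; -- id of the cache calculation that was refreshed or reused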

View File

@ -19,13 +19,15 @@ BEGIN
* @return tmp.ticketComponentPrice
*/
DECLARE vAvailableCalc INT;
DECLARE vAvailableNoRaidsCalc INT;
DECLARE vAvailabled DATETIME;
DECLARE vDone BOOL;
DECLARE vHour INT;
DECLARE vShipped DATE;
DECLARE vWarehouseFk SMALLINT;
DECLARE vZoneFk INT;
DECLARE vDone BOOL;
DECLARE cTravelTree CURSOR FOR
SELECT zoneFk, warehouseFk, shipped FROM tmp.zoneGetShipped;
SELECT zoneFk, warehouseFk, shipped, `hour` FROM tmp.zoneGetShipped;
DECLARE CONTINUE HANDLER FOR NOT FOUND SET vDone = TRUE;
@ -66,14 +68,15 @@ BEGIN
OPEN cTravelTree;
l: LOOP
SET vDone = FALSE;
FETCH cTravelTree INTO vZoneFk, vWarehouseFk, vShipped;
FETCH cTravelTree INTO vZoneFk, vWarehouseFk, vShipped, vHour;
SET vAvailabled = vShipped + INTERVAL HOUR(vHour) HOUR;
IF vDone THEN
LEAVE l;
END IF;
CALL `cache`.available_refresh(vAvailableCalc, FALSE, vWarehouseFk, vShipped);
CALL `cache`.availableNoRaids_refresh(vAvailableNoRaidsCalc, FALSE, vWarehouseFk, vShipped);
CALL `cache`.available_refresh(vAvailableCalc, FALSE, vWarehouseFk, vAvailabled);
CALL buy_getUltimate(NULL, vWarehouseFk, vShipped);
INSERT INTO tmp.ticketLot (warehouseFk, itemFk, available, buyFk, zoneFk)
@ -83,31 +86,10 @@ BEGIN
bu.buyFk,
vZoneFk
FROM `cache`.available a
LEFT JOIN cache.availableNoRaids anr ON anr.item_id = a.item_id
AND anr.calc_id = vAvailableNoRaidsCalc
JOIN tmp.item i ON i.itemFk = a.item_id
JOIN item it ON it.id = i.itemFk
JOIN `zone` z ON z.id = vZoneFk
LEFT JOIN tmp.buyUltimate bu ON bu.itemFk = a.item_id
LEFT JOIN edi.supplyResponse sr ON sr.ID = it.supplyResponseFk
LEFT JOIN edi.VMPSettings v ON v.VMPID = sr.vmpID
LEFT JOIN edi.marketPlace mp ON mp.id = sr.MarketPlaceID
LEFT JOIN (SELECT isVNHSupplier, isEarlyBird, TRUE AS itemAllowed
FROM addressFilter af
JOIN (SELECT ad.provinceFk, p.countryFk, ad.isLogifloraAllowed
FROM address ad
JOIN province p ON p.id = ad.provinceFk
WHERE ad.id = vAddressFk
) sub2 ON sub2.provinceFk <=> IFNULL(af.provinceFk, sub2.provinceFk)
AND sub2.countryFk <=> IFNULL(af.countryFk, sub2.countryFk)
AND sub2.isLogifloraAllowed <=> IFNULL(af.isLogifloraAllowed, sub2.isLogifloraAllowed)
WHERE vWarehouseFk = af.warehouseFk
AND (vShipped < af.beforeDated
OR ISNULL(af.beforeDated)
OR vShipped > af.afterDated
OR ISNULL(af.afterDated))
) sub ON sub.isVNHSupplier = v.isVNHSupplier
AND (sub.isEarlyBird = mp.isEarlyBird OR ISNULL(sub.isEarlyBird))
JOIN agencyMode am ON am.id = vAgencyModeFk
JOIN agency ag ON ag.id = am.agencyFk
JOIN itemType itt ON itt.id = it.typeFk
@ -124,7 +106,6 @@ BEGIN
AND ait.itemTypeFk = itt.id
WHERE a.calc_id = vAvailableCalc
AND a.available > 0
AND (sub.itemAllowed OR NOT it.isFloramondo OR anr.available > 0)
AND (ag.isAnyVolumeAllowed OR NOT itt.isUnconventionalSize)
AND (it.`size` IS NULL
OR IF(itc.isReclining,

View File

@ -160,9 +160,11 @@ BEGIN
OR (NOT s.isPreparable AND NOT s.isPrintable)
OR pb.collectionH IS NOT NULL
OR pb.collectionV IS NOT NULL
OR pb.collectionA IS NOT NULL
OR pb.collectionN IS NOT NULL
OR (NOT pb.H AND pb.V > 0 AND vItemPackingTypeFk = 'H')
OR (NOT pb.H AND pb.V + pb.A > 0 AND vItemPackingTypeFk = 'H')
OR (NOT pb.V AND vItemPackingTypeFk = 'V')
OR (NOT pb.A AND vItemPackingTypeFk = 'A')
OR (pc.isPreviousPreparationRequired AND pb.previousWithoutParking)
OR LENGTH(pb.problem)
OR pb.lines > vLinesLimit

View File

@ -30,7 +30,7 @@ BEGIN
WITH entriesIn AS (
SELECT 'entry' originType,
e.id originId,
tr.landed shipped,
IFNULL(tr.availabled, tr.landed) shipped,
b.quantity `in`,
NULL `out`,
st.alertLevel ,
@ -54,7 +54,7 @@ BEGIN
OR (util.VN_CURDATE() AND tr.isReceived),
'DELIVERED',
'FREE')
WHERE tr.landed >= vDateInventory
WHERE IFNULL(tr.availabled, tr.landed) >= vDateInventory
AND tr.warehouseInFk = vWarehouseFk
AND (s.id <> vSupplierInventoryFk OR vDated IS NULL)
AND b.itemFk = vItemFk
@ -99,7 +99,7 @@ BEGIN
),
sales AS (
WITH itemSales AS (
SELECT DATE(t.shipped) shipped,
SELECT DATE(t.shipped) + INTERVAL HOUR(z.`hour`) HOUR shipped,
s.quantity,
st2.alertLevel,
st2.name,
@ -114,6 +114,7 @@ BEGIN
cb.claimFk
FROM vn.sale s
JOIN vn.ticket t ON t.id = s.ticketFk
JOIN vn.`zone` z ON z.id = t.zoneFk
LEFT JOIN vn.ticketState ts ON ts.ticketFk = t.id
LEFT JOIN vn.state st ON st.code = ts.code
JOIN vn.client c ON c.id = t.clientFk
@ -189,14 +190,15 @@ BEGIN
SELECT * FROM sales
UNION ALL
SELECT * FROM orders
ORDER BY shipped,
ORDER BY DATE(shipped),
(inventorySupplierFk = entityId) DESC,
alertLevel DESC,
isTicket,
`order` DESC,
isPicked DESC,
`in` DESC,
`out` DESC;
`out` DESC,
shipped;
IF vDated IS NULL THEN
SET @a := 0;
@ -205,7 +207,7 @@ BEGIN
SELECT t.originType,
t.originId,
DATE(@shipped:= t.shipped) shipped,
@shipped:= t.shipped shipped,
t.alertLevel,
t.stateName,
t.reference,

View File

@ -1,5 +1,16 @@
DELIMITER $$
CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`item_getLack`(IN vForce BOOLEAN, IN vDays INT)
CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`item_getLack`(
vSelf INT,
vForce BOOLEAN,
vDays INT,
vLongname VARCHAR(255),
vProducerName VARCHAR(255),
vColor VARCHAR(255),
vSize INT,
vOrigen INT,
vLack INT,
vWarehouseFk INT
)
BEGIN
/**
* Calcula una tabla con el máximo negativo visible para cada producto y almacen
@ -47,6 +58,14 @@ BEGIN
WHERE w.isForTicket
AND ic.display
AND it.code != 'GEN'
AND (vSelf IS NULL OR i.id = vSelf)
AND (vLongname IS NULL OR i.name = vLongname)
AND (vProducerName IS NULL OR p.`name` LIKE CONCAT('%', vProducerName, '%'))
AND (vColor IS NULL OR vColor = i.inkFk)
AND (vSize IS NULL OR vSize = i.`size`)
AND (vOrigen IS NULL OR vOrigen = w.id)
AND (vLack IS NULL OR vLack = sub.amount)
AND (vWarehouseFk IS NULL OR vWarehouseFk = w.id)
GROUP BY i.id, w.id
HAVING lack < 0;
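Note: each added parameter acts as an optional filter that is disabled when NULL, as the AND (vX IS NULL OR ...) predicates show. A minimal sketch of two calls under the new signature (all values hypothetical):

    CALL vn.item_getLack(NULL, FALSE, 2, NULL, NULL, NULL, NULL, NULL, NULL, NULL);    -- no filters, 2-day scope
    CALL vn.item_getLack(NULL, FALSE, 2, NULL, 'Floramondo', NULL, 70, NULL, NULL, 1); -- by producer, size and warehouse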

View File

@ -82,21 +82,26 @@ BEGIN
AND it.priority = vPriority
LEFT JOIN vn.tag t ON t.id = it.tagFk
LEFT JOIN vn.buy b ON b.id = bu.buyFk
LEFT JOIN vn.itemShelvingStock iss ON iss.itemFk = i.id
AND iss.warehouseFk = vWarehouseFk
LEFT JOIN vn.ink ink ON ink.id = i.tag5
JOIN itemTags its
WHERE a.available > 0
AND (i.typeFk = its.typeFk OR NOT vShowType)
AND i.id <> vSelf
ORDER BY `counter` DESC,
(t.name = its.name) DESC,
(it.value = its.value) DESC,
(i.tag5 = its.tag5) DESC,
match5 DESC,
(i.tag6 = its.tag6) DESC,
match6 DESC,
(i.tag7 = its.tag7) DESC,
match7 DESC,
(i.tag8 = its.tag8) DESC,
match8 DESC
ORDER BY (a.available > 0) DESC,
`counter` DESC,
(t.name = its.name) DESC,
(it.value = its.value) DESC,
(i.tag5 = its.tag5) DESC,
(ink.`showOrder`) DESC,
match5 DESC,
(i.tag6 = its.tag6) DESC,
match6 DESC,
(i.tag7 = its.tag7) DESC,
match7 DESC,
(i.tag8 = its.tag8) DESC,
match8 DESC
LIMIT 100;
DROP TEMPORARY TABLE tmp.buyUltimate;

View File

@ -35,8 +35,8 @@ BEGIN
SELECT iei.itemFk, iei.quantity
FROM itemEntryIn iei
JOIN item i ON i.id = iei.itemFk
WHERE iei.landed >= util.VN_CURDATE()
AND iei.landed < vDated
WHERE IFNULL(iei.availabled, iei.landed) >= util.VN_CURDATE()
AND IFNULL(iei.availabled, iei.landed) < vDated
AND iei.warehouseInFk = vWarehouseFk
AND (vItemFk IS NULL OR iei.itemFk = vItemFk)
UNION ALL

View File

@ -1,16 +1,18 @@
DELIMITER $$
CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`prepareTicketList`(vStartingDate DATETIME, vEndingDate DATETIME)
CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`prepareTicketList`(
vStartingDate DATETIME,
vEndingDate DATETIME
)
BEGIN
DROP TEMPORARY TABLE IF EXISTS tmp.productionTicket;
CREATE TEMPORARY TABLE tmp.productionTicket
(PRIMARY KEY (ticketFk))
ENGINE = MEMORY
SELECT t.id ticketFk, t.clientFk
SELECT t.id ticketFk
FROM ticket t
JOIN alertLevel al ON al.code = 'DELIVERED'
LEFT JOIN ticketState ts ON ts.ticketFk = t.id
JOIN client c ON c.id = t.clientFk
WHERE c.typeFk IN ('normal','handMaking','internalUse')
AND (
t.shipped BETWEEN util.VN_CURDATE() AND vEndingDate

View File

@ -24,24 +24,31 @@ proc: BEGIN
CALL prepareTicketList(util.yesterday(), vEndingDate);
CREATE OR REPLACE TEMPORARY TABLE tmp.ticket
SELECT * FROM tmp.productionTicket;
CALL prepareClientList();
CREATE OR REPLACE TEMPORARY TABLE tmp.sale_getProblems
(INDEX (ticketFk))
ENGINE = MEMORY
SELECT tt.ticketFk, tt.clientFk, t.warehouseFk, t.shipped
FROM tmp.productionTicket tt
JOIN ticket t ON t.id = tt.ticketFk;
SELECT ticketFk
FROM tmp.productionTicket;
CALL ticket_getProblems(vIsTodayRelative);
CREATE OR REPLACE TEMPORARY TABLE tmp.productionBuffer
(PRIMARY KEY(ticketFk), previaParking VARCHAR(255))
ENGINE = MEMORY
WITH saleProblemsDescription AS(
SELECT s.ticketFk,
LEFT(CONCAT('F: ', GROUP_CONCAT(CONCAT(i.id, ' ', i.longName) SEPARATOR ', ')), 250) itemShortage,
LEFT(CONCAT('R: ', GROUP_CONCAT(CONCAT(i2.id, ' ', i2.longName) SEPARATOR ', ')), 250) itemDelay,
LEFT(CONCAT('I: ', GROUP_CONCAT(CONCAT(i3.id, ' ', i3.longName) SEPARATOR ', ')), 250) itemLost
FROM tmp.saleProblems sp
JOIN vn.sale s ON s.id = sp.saleFk
LEFT JOIN vn.item i ON i.id = s.itemFk AND sp.hasItemShortage
LEFT JOIN vn.item i2 ON i2.id = s.itemFk AND sp.hasItemDelay
LEFT JOIN vn.item i3 ON i3.id = s.itemFk AND sp.hasItemLost
WHERE hasItemShortage OR hasItemDelay OR hasItemLost
GROUP BY s.ticketFk
)
SELECT tt.ticketFk,
tt.clientFk,
t.clientFk,
t.warehouseFk,
t.nickname,
t.packages,
@ -59,7 +66,17 @@ proc: BEGIN
0 `lines`,
CAST( 0 AS DECIMAL(5,2)) m3,
CAST( 0 AS DECIMAL(5,2)) preparationRate,
"" problem,
TRIM(CAST(CONCAT( IFNULL(sp.itemShortage, ''),
IFNULL(sp.itemDelay, ''),
IFNULL(sp.itemLost, ''),
IF(tpr.isFreezed, ' CONGELADO',''),
IF(tpr.hasHighRisk, ' RIESGO',''),
IF(tpr.hasTicketRequest, ' COD 100',''),
IF(tpr.isTaxDataChecked, ' FICHA INCOMPLETA', ''),
IF(tpr.hasComponentLack, ' COMPONENTES', ''),
IF(HOUR(util.VN_NOW()) < IF(HOUR(t.shipped), HOUR(t.shipped), COALESCE(HOUR(zc.hour),HOUR(z.hour)))
AND tpr.isTooLittle, ' PEQUEÑO', '')
) AS char(255))) problem,
IFNULL(tls.state,2) state,
w.code workerCode,
DATE(t.shipped) shipped,
@ -74,31 +91,34 @@ proc: BEGIN
pk.code parking,
0 H,
0 V,
0 A,
0 N,
st.isOk,
ag.isOwn,
rm.bufferFk
FROM tmp.productionTicket tt
JOIN ticket t ON tt.ticketFk = t.id
JOIN alertLevel al ON al.code = 'FREE'
LEFT JOIN ticketStateToday tst ON tst.ticketFk = t.id
LEFT JOIN `state` st ON st.id = tst.state
LEFT JOIN client c ON c.id = t.clientFk
LEFT JOIN worker wk ON wk.id = c.salesPersonFk
JOIN address a ON a.id = t.addressFk
LEFT JOIN province p ON p.id = a.provinceFk
JOIN agencyMode am ON am.id = t.agencyModeFk
JOIN deliveryMethod dm ON dm.id = am.deliveryMethodFk
JOIN agency ag ON ag.id = am.agencyFk
LEFT JOIN ticketState tls ON tls.ticketFk = tt.ticketFk
LEFT JOIN ticketLastUpdated tlu ON tlu.ticketFk = tt.ticketFk
LEFT JOIN worker w ON w.id = tls.userFk
LEFT JOIN routesMonitor rm ON rm.routeFk = t.routeFk
LEFT JOIN `zone` z ON z.id = t.zoneFk
LEFT JOIN zoneClosure zc ON zc.zoneFk = t.zoneFk
JOIN vn.ticket t ON tt.ticketFk = t.id
JOIN vn.alertLevel al ON al.code = 'FREE'
LEFT JOIN vn.ticketStateToday tst ON tst.ticketFk = t.id
LEFT JOIN vn.`state` st ON st.id = tst.state
LEFT JOIN vn.client c ON c.id = t.clientFk
LEFT JOIN vn.worker wk ON wk.id = c.salesPersonFk
JOIN vn.address a ON a.id = t.addressFk
LEFT JOIN vn.province p ON p.id = a.provinceFk
JOIN vn.agencyMode am ON am.id = t.agencyModeFk
JOIN vn.deliveryMethod dm ON dm.id = am.deliveryMethodFk
JOIN vn.agency ag ON ag.id = am.agencyFk
LEFT JOIN vn.ticketState tls ON tls.ticketFk = tt.ticketFk
LEFT JOIN vn.ticketLastUpdated tlu ON tlu.ticketFk = tt.ticketFk
LEFT JOIN vn.worker w ON w.id = tls.userFk
LEFT JOIN vn.routesMonitor rm ON rm.routeFk = t.routeFk
LEFT JOIN vn.`zone` z ON z.id = t.zoneFk
LEFT JOIN vn.zoneClosure zc ON zc.zoneFk = t.zoneFk
AND DATE(t.shipped) = zc.dated
LEFT JOIN ticketParking tp ON tp.ticketFk = t.id
LEFT JOIN parking pk ON pk.id = tp.parkingFk
LEFT JOIN vn.ticketParking tp ON tp.ticketFk = t.id
LEFT JOIN vn.parking pk ON pk.id = tp.parkingFk
LEFT JOIN tmp.ticketProblems tpr ON tpr.ticketFk = tt.ticketFk
LEFT JOIN saleProblemsDescription sp ON sp.ticketFk = tt.ticketFk
WHERE t.warehouseFk = vWarehouseFk
AND dm.code IN ('AGENCY', 'DELIVERY', 'PICKUP');
@ -119,21 +139,9 @@ proc: BEGIN
CHANGE COLUMN `problem` `problem` VARCHAR(255),
ADD COLUMN `collectionH` INT,
ADD COLUMN `collectionV` INT,
ADD COLUMN `collectionA` INT,
ADD COLUMN `collectionN` INT;
UPDATE tmp.productionBuffer pb
JOIN tmp.ticket_problems tp ON tp.ticketFk = pb.ticketFk
SET pb.problem = TRIM(CAST(CONCAT( IFNULL(tp.itemShortage, ''),
IFNULL(tp.itemDelay, ''),
IFNULL(tp.itemLost, ''),
IF(tp.isFreezed, ' CONGELADO',''),
IF(tp.hasHighRisk, ' RIESGO',''),
IF(tp.hasTicketRequest, ' COD 100',''),
IF(tp.isTaxDataChecked, '',' FICHA INCOMPLETA'),
IF(tp.hasComponentLack, ' COMPONENTES', ''),
IF(HOUR(util.VN_NOW()) < pb.HH AND tp.isTooLittle, ' PEQUEÑO', '')
) AS char(255)));
-- Clientes Nuevos o Recuperados
UPDATE tmp.productionBuffer pb
LEFT JOIN bs.clientNewBorn cnb ON cnb.clientFk = pb.clientFk
@ -173,11 +181,13 @@ proc: BEGIN
SELECT ticketFk,
SUM(sub.H) H,
SUM(sub.V) V,
SUM(sub.A) A,
SUM(sub.N) N
FROM (
SELECT t.ticketFk,
SUM(i.itemPackingTypeFk = 'H') H,
SUM(i.itemPackingTypeFk = 'V') V,
SUM(i.itemPackingTypeFk = 'A') A,
SUM(i.itemPackingTypeFk IS NULL) N
FROM tmp.productionTicket t
JOIN sale s ON s.ticketFk = t.ticketFk
@ -190,6 +200,7 @@ proc: BEGIN
JOIN tItemPackingType ti ON ti.ticketFk = pb.ticketFk
SET pb.H = ti.H,
pb.V = ti.V,
pb.A = ti.A,
pb.N = ti.N;
-- Colecciones segun tipo de encajado
@ -197,6 +208,7 @@ proc: BEGIN
JOIN ticketCollection tc ON pb.ticketFk = tc.ticketFk
SET pb.collectionH = IF(pb.H, tc.collectionFk, NULL),
pb.collectionV = IF(pb.V, tc.collectionFk, NULL),
pb.collectionA = IF(pb.A, tc.collectionFk, NULL),
pb.collectionN = IF(pb.N, tc.collectionFk, NULL);
-- Previa pendiente
@ -278,7 +290,8 @@ proc: BEGIN
DROP TEMPORARY TABLE
tmp.productionTicket,
tmp.ticket,
tmp.ticket_problems,
tmp.ticketProblems,
tmp.saleProblems,
tmp.ticketWithPrevia,
tItemShelvingStock,
tItemPackingType;

View File

@ -1,86 +1,42 @@
DELIMITER $$
CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`sale_getProblems`(
vIsTodayRelative tinyint(1)
vIsTodayRelative TINYINT(1)
)
BEGIN
/**
* Calcula los problemas de cada venta para un conjunto de tickets.
* Calcula los problemas para un conjunto de sale
*
* @param vIsTodayRelative Indica si se calcula el disponible como si todo saliera hoy
* @table tmp.sale_getProblems(ticketFk, clientFk, warehouseFk, shipped) Tickets a calcular
* @return tmp.sale_problems
* @table tmp.sale(saleFk) Identificadores de los sale a calcular
* @return tmp.saleProblems
*/
DECLARE vWarehouseFk INT;
DECLARE vWarehouseFk INT;
DECLARE vDate DATE;
DECLARE vAvailableCache INT;
DECLARE vAvailableCache INT;
DECLARE vVisibleCache INT;
DECLARE vDone BOOL;
DECLARE vCursor CURSOR FOR
SELECT DISTINCT warehouseFk, IF(vIsTodayRelative, util.VN_CURDATE(), DATE(shipped))
FROM tmp.sale_getProblems
WHERE shipped BETWEEN util.VN_CURDATE()
AND util.dayEnd(util.VN_CURDATE() + INTERVAL IF(vIsTodayRelative, 9.9, 1.9) DAY);
DECLARE vCursor CURSOR FOR
SELECT t.warehouseFk, IF(vIsTodayRelative, util.VN_CURDATE(), DATE(t.shipped)) dated
FROM tmp.sale ts
JOIN sale s ON s.id = ts.saleFk
JOIN ticket t ON t.id = s.ticketFk
WHERE t.shipped BETWEEN util.VN_CURDATE()
AND util.dayEnd(util.VN_CURDATE() + INTERVAL IF(vIsTodayRelative, 9.9, 1.9) DAY)
GROUP BY warehouseFk, dated;
DECLARE CONTINUE HANDLER FOR NOT FOUND SET vDone = TRUE;
CREATE OR REPLACE TEMPORARY TABLE tmp.sale_problems (
ticketFk INT(11),
CREATE OR REPLACE TEMPORARY TABLE tmp.saleProblems(
saleFk INT(11),
isFreezed INTEGER(1) DEFAULT 0,
risk DECIMAL(10,1) DEFAULT 0,
hasRisk TINYINT(1) DEFAULT 0,
hasHighRisk TINYINT(1) DEFAULT 0,
hasTicketRequest INTEGER(1) DEFAULT 0,
itemShortage VARCHAR(255),
isTaxDataChecked INTEGER(1) DEFAULT 1,
itemDelay VARCHAR(255),
itemLost VARCHAR(255),
hasComponentLack INTEGER(1),
hasRounding VARCHAR(255),
isTooLittle BOOL DEFAULT FALSE,
isVip BOOL DEFAULT FALSE,
PRIMARY KEY (ticketFk, saleFk)
); -- No memory
hasItemShortage BOOL DEFAULT FALSE,
hasItemLost BOOL DEFAULT FALSE,
hasComponentLack BOOL DEFAULT FALSE,
hasItemDelay BOOL DEFAULT FALSE,
hasRounding BOOL DEFAULT FALSE,
PRIMARY KEY (saleFk)
) ENGINE = MEMORY;
INSERT INTO tmp.sale_problems(ticketFk,
saleFk,
isFreezed,
risk,
hasRisk,
hasHighRisk,
hasTicketRequest,
isTaxDataChecked,
hasComponentLack,
isTooLittle)
SELECT sgp.ticketFk,
s.id,
IF(FIND_IN_SET('isFreezed', t.problem), TRUE, FALSE) isFreezed,
t.risk,
IF(FIND_IN_SET('hasRisk', t.problem), TRUE, FALSE) hasRisk,
IF(FIND_IN_SET('hasHighRisk', t.problem), TRUE, FALSE) hasHighRisk,
IF(FIND_IN_SET('hasTicketRequest', t.problem), TRUE, FALSE) hasTicketRequest,
IF(FIND_IN_SET('isTaxDataChecked', t.problem), FALSE, TRUE) isTaxDataChecked,
IF(FIND_IN_SET('hasComponentLack', s.problem), TRUE, FALSE) hasComponentLack,
IF(FIND_IN_SET('isTooLittle', t.problem)
AND util.VN_NOW() < (util.VN_CURDATE() + INTERVAL HOUR(zc.`hour`) HOUR) + INTERVAL MINUTE(zc.`hour`) MINUTE,
TRUE, FALSE) isTooLittle
FROM tmp.sale_getProblems sgp
JOIN ticket t ON t.id = sgp.ticketFk
LEFT JOIN sale s ON s.ticketFk = t.id
LEFT JOIN item i ON i.id = s.itemFk
LEFT JOIN zoneClosure zc ON zc.zoneFk = t.zoneFk
AND zc.dated = util.VN_CURDATE()
WHERE s.problem <> '' OR t.problem <> '' OR t.risk
GROUP BY t.id, s.id;
INSERT INTO tmp.sale_problems(ticketFk, isVip)
SELECT sgp.ticketFk, TRUE
FROM tmp.sale_getProblems sgp
JOIN client c ON c.id = sgp.clientFk
WHERE c.businessTypeFk = 'VIP'
ON DUPLICATE KEY UPDATE isVIP = TRUE;
CREATE OR REPLACE TEMPORARY TABLE tItemShelvingStock_byWarehouse
CREATE OR REPLACE TEMPORARY TABLE tItemShelving
(INDEX (itemFk, warehouseFk))
ENGINE = MEMORY
SELECT ish.itemFk itemFk,
@ -92,6 +48,14 @@ BEGIN
JOIN sector s ON s.id = p.sectorFk
GROUP BY ish.itemFk, s.warehouseFk;
-- Componentes: Algún componente obligatorio no se ha calcualdo
INSERT INTO tmp.saleProblems(saleFk, hasComponentLack)
SELECT s.id, TRUE
FROM tmp.sale ts
JOIN sale s ON s.id = ts.saleFk
WHERE FIND_IN_SET('hasComponentLack', s.problem)
GROUP BY s.id;
-- Disponible, faltas, inventario y retrasos
OPEN vCursor;
l: LOOP
@ -108,126 +72,108 @@ BEGIN
-- Faltas: visible, disponible y ubicado son menores que la cantidad vendida
CALL cache.visible_refresh(vVisibleCache, FALSE, vWarehouseFk);
INSERT INTO tmp.sale_problems(ticketFk, itemShortage, saleFk)
SELECT ticketFk, problem, saleFk
FROM (
SELECT sgp.ticketFk,
LEFT(CONCAT('F: ', GROUP_CONCAT(i.id, ' ', i.longName, ' ')), 250) problem,
s.id saleFk
FROM tmp.sale_getProblems sgp
JOIN ticket t ON t.id = sgp.ticketFk
JOIN sale s ON s.ticketFk = t.id
JOIN item i ON i.id = s.itemFk
JOIN itemType it ON it.id = i.typeFk
JOIN itemCategory ic ON ic.id = it.categoryFk
LEFT JOIN cache.visible v ON v.item_id = i.id
AND v.calc_id = vVisibleCache
LEFT JOIN cache.available av ON av.item_id = i.id
AND av.calc_id = vAvailableCache
LEFT JOIN tItemShelvingStock_byWarehouse issw ON issw.itemFk = i.id
AND issw.warehouseFk = t.warehouseFk
WHERE IFNULL(v.visible, 0) < s.quantity
AND IFNULL(av.available, 0) < 0
AND IFNULL(issw.visible, 0) < s.quantity
AND NOT s.isPicked
AND NOT s.reserved
AND ic.merchandise
AND IF(vIsTodayRelative, TRUE, DATE(t.shipped) = vDate)
AND NOT i.generic
AND util.VN_CURDATE() = vDate
AND t.warehouseFk = vWarehouseFk
GROUP BY sgp.ticketFk) sub
ON DUPLICATE KEY UPDATE itemShortage = sub.problem, saleFk = sub.saleFk;
INSERT INTO tmp.saleProblems(saleFk, hasItemShortage)
SELECT s.id, TRUE
FROM tmp.sale ts
JOIN sale s ON s.id = ts.saleFk
JOIN ticket t ON t.id = s.ticketFk
JOIN item i ON i.id = s.itemFk
JOIN itemType it ON it.id = i.typeFk
JOIN itemCategory ic ON ic.id = it.categoryFk
LEFT JOIN cache.visible v ON v.item_id = i.id
AND v.calc_id = vVisibleCache
LEFT JOIN cache.available av ON av.item_id = i.id
AND av.calc_id = vAvailableCache
LEFT JOIN tItemShelving tis ON tis.itemFk = i.id
AND tis.warehouseFk = t.warehouseFk
WHERE (s.quantity > v.visible OR (s.quantity > 0 AND v.visible IS NULL))
AND (av.available < 0 OR av.available IS NULL)
AND (s.quantity > tis.visible OR tis.visible IS NULL)
AND NOT s.isPicked
AND NOT s.reserved
AND ic.merchandise
AND IF(vIsTodayRelative, TRUE, DATE(t.shipped) = vDate)
AND NOT i.generic
AND util.VN_CURDATE() = vDate
AND t.warehouseFk = vWarehouseFk
GROUP BY s.id
ON DUPLICATE KEY UPDATE hasItemShortage = TRUE;
-- Inventario: Visible suficiente, pero ubicado menor a la cantidad vendida
INSERT INTO tmp.sale_problems(ticketFk, itemLost, saleFk)
SELECT ticketFk, problem, saleFk
FROM (
SELECT sgp.ticketFk,
LEFT(GROUP_CONCAT('I: ', i.id, ' ', i.longName, ' '), 250) problem,
s.id saleFk
FROM tmp.sale_getProblems sgp
JOIN ticket t ON t.id = sgp.ticketFk
JOIN sale s ON s.ticketFk = t.id
JOIN item i ON i.id = s.itemFk
JOIN itemType it ON it.id = i.typeFk
JOIN itemCategory ic ON ic.id = it.categoryFk
LEFT JOIN cache.visible v ON v.item_id = s.itemFk
AND v.calc_id = vVisibleCache
LEFT JOIN tItemShelvingStock_byWarehouse issw ON issw.itemFk = i.id
AND issw.warehouseFk = t.warehouseFk
WHERE IFNULL(v.visible, 0) >= s.quantity
AND IFNULL(issw.visible, 0) < s.quantity
AND s.quantity > 0
AND NOT s.isPicked
AND NOT s.reserved
AND ic.merchandise
AND IF(vIsTodayRelative, TRUE, DATE(t.shipped) = vDate)
AND NOT i.generic
AND util.VN_CURDATE() = vDate
AND t.warehouseFk = vWarehouseFk
GROUP BY sgp.ticketFk
) sub
ON DUPLICATE KEY UPDATE itemLost = sub.problem, saleFk = sub.saleFk;
INSERT INTO tmp.saleProblems(saleFk, hasItemLost)
SELECT s.id, TRUE
FROM tmp.sale ts
JOIN sale s ON s.id = ts.saleFk
JOIN ticket t ON t.id = s.ticketFk
JOIN item i ON i.id = s.itemFk
JOIN itemType it ON it.id = i.typeFk
JOIN itemCategory ic ON ic.id = it.categoryFk
LEFT JOIN cache.visible v ON v.item_id = s.itemFk
AND v.calc_id = vVisibleCache
LEFT JOIN tItemShelving tis ON tis.itemFk = i.id
AND tis.warehouseFk = t.warehouseFk
WHERE (v.visible >= s.quantity OR v.visible IS NULL)
AND (s.quantity > tis.visible AND tis.visible IS NOT NULL)
AND s.quantity > 0
AND NOT s.isPicked
AND NOT s.reserved
AND ic.merchandise
AND IF(vIsTodayRelative, TRUE, DATE(t.shipped) = vDate)
AND NOT i.generic
AND util.VN_CURDATE() = vDate
AND t.warehouseFk = vWarehouseFk
GROUP BY s.id
ON DUPLICATE KEY UPDATE hasItemLost = TRUE;
-- Retraso: Disponible suficiente, pero no visible ni ubicado
INSERT INTO tmp.sale_problems(ticketFk, itemDelay, saleFk)
SELECT ticketFk, problem, saleFk
FROM (
SELECT sgp.ticketFk,
LEFT(GROUP_CONCAT('R: ', i.id, ' ', i.longName, ' '), 250) problem,
s.id saleFk
FROM tmp.sale_getProblems sgp
JOIN ticket t ON t.id = sgp.ticketFk
JOIN sale s ON s.ticketFk = t.id
JOIN item i ON i.id = s.itemFk
JOIN itemType it ON it.id = i.typeFk
JOIN itemCategory ic ON ic.id = it.categoryFk
LEFT JOIN cache.visible v ON v.item_id = s.itemFk
AND v.calc_id = vVisibleCache
LEFT JOIN cache.available av ON av.item_id = i.id
AND av.calc_id = vAvailableCache
LEFT JOIN tItemShelvingStock_byWarehouse issw ON issw.itemFk = i.id
AND issw.warehouseFk = t.warehouseFk
WHERE IFNULL(v.visible, 0) < s.quantity
AND IFNULL(av.available, 0) >= 0
AND IFNULL(issw.visible, 0) < s.quantity
AND s.quantity > 0
AND NOT s.isPicked
AND NOT s.reserved
AND ic.merchandise
AND IF(vIsTodayRelative, TRUE, DATE(t.shipped) = vDate)
AND NOT i.generic
AND util.VN_CURDATE() = vDate
AND t.warehouseFk = vWarehouseFk
GROUP BY sgp.ticketFk
) sub
ON DUPLICATE KEY UPDATE itemDelay = sub.problem, saleFk = sub.saleFk;
INSERT INTO tmp.saleProblems(saleFk, hasItemDelay)
SELECT s.id, TRUE
FROM tmp.sale ts
JOIN sale s ON s.id = ts.saleFk
JOIN ticket t ON t.id = s.ticketFk
JOIN item i ON i.id = s.itemFk
JOIN itemType it ON it.id = i.typeFk
JOIN itemCategory ic ON ic.id = it.categoryFk
LEFT JOIN cache.visible v ON v.item_id = s.itemFk
AND v.calc_id = vVisibleCache
LEFT JOIN cache.available av ON av.item_id = i.id
AND av.calc_id = vAvailableCache
LEFT JOIN tItemShelving tis ON tis.itemFk = i.id
AND tis.warehouseFk = t.warehouseFk
WHERE (s.quantity > v.visible AND v.visible IS NULL)
AND (av.available >= 0 OR av.available IS NULL)
AND (s.quantity > tis.visible AND tis.visible IS NOT NULL)
AND s.quantity > 0
AND NOT s.isPicked
AND NOT s.reserved
AND ic.merchandise
AND IF(vIsTodayRelative, TRUE, DATE(t.shipped) = vDate)
AND NOT i.generic
AND util.VN_CURDATE() = vDate
AND t.warehouseFk = vWarehouseFk
GROUP BY s.id
ON DUPLICATE KEY UPDATE hasItemDelay = TRUE;
-- Redondeo: cantidad incorrecta con respecto al grouping
CALL buy_getUltimate(NULL, vWarehouseFk, vDate);
INSERT INTO tmp.sale_problems(ticketFk, hasRounding, saleFk)
SELECT ticketFk, problem, saleFk
FROM (
SELECT sgp.ticketFk,
s.id saleFk,
LEFT(GROUP_CONCAT('RE: ',i.id, ' ', IFNULL(i.longName,'') SEPARATOR ', '), 250) problem
FROM tmp.sale_getProblems sgp
JOIN ticket t ON t.id = sgp.ticketFk
AND t.warehouseFk = vWarehouseFk
JOIN sale s ON s.ticketFk = sgp.ticketFk
JOIN item i ON i.id = s.itemFk
JOIN tmp.buyUltimate bu ON bu.itemFk = s.itemFk
JOIN buy b ON b.id = bu.buyFk
WHERE MOD(s.quantity, b.`grouping`)
GROUP BY sgp.ticketFk
)sub
ON DUPLICATE KEY UPDATE hasRounding = sub.problem, saleFk = sub.saleFk;
INSERT INTO tmp.saleProblems(saleFk, hasRounding)
SELECT s.id, TRUE
FROM tmp.sale ts
JOIN sale s ON s.id = ts.saleFk
JOIN ticket t ON t.id = s.ticketFk
AND t.warehouseFk = vWarehouseFk
JOIN item i ON i.id = s.itemFk
JOIN tmp.buyUltimate bu ON bu.itemFk = s.itemFk
JOIN buy b ON b.id = bu.buyFk
WHERE MOD(s.quantity, b.`grouping`)
GROUP BY s.id
ON DUPLICATE KEY UPDATE hasRounding = TRUE;
DROP TEMPORARY TABLE tmp.buyUltimate;
END LOOP;
CLOSE vCursor;
DROP TEMPORARY TABLE tItemShelvingStock_byWarehouse;
DROP TEMPORARY TABLE tItemShelving;
END$$
DELIMITER ;

View File

@ -1,25 +1,25 @@
DELIMITER $$
CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`sale_getProblemsByTicket`(IN vTicketFk INT, IN vIsTodayRelative TINYINT(1))
CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`sale_getProblemsByTicket`(
IN vTicketFk INT,
IN vIsTodayRelative TINYINT(1)
)
BEGIN
/**
* Calcula los problemas de cada venta
* para un conjunto de tickets.
* Calcula los problemas de cada venta para un tickets.
*
* @return Problems result
*/
CREATE OR REPLACE TEMPORARY TABLE tmp.sale_getProblems
(INDEX (ticketFk))
ENGINE = MEMORY
SELECT t.id ticketFk, t.clientFk, t.warehouseFk, t.shipped
FROM ticket t
WHERE t.id = vTicketFk;
CREATE OR REPLACE TEMPORARY TABLE tmp.sale
(INDEX (saleFk))
ENGINE = MEMORY
SELECT id saleFk FROM sale WHERE ticketFk = vTicketFk;
CALL sale_getProblems(vIsTodayRelative);
CALL sale_getProblems(vIsTodayRelative);
SELECT * FROM tmp.sale_problems;
SELECT * FROM tmp.saleProblems;
DROP TEMPORARY TABLE
tmp.sale_getProblems,
tmp.sale_problems;
DROP TEMPORARY TABLE
tmp.saleProblems,
tmp.sale;
END$$
DELIMITER ;

View File

@ -25,9 +25,11 @@ BEGIN
DECLARE vNewSaleFk INT;
DECLARE vFinalPrice DECIMAL(10,2);
DECLARE vIsRequiredTx BOOL DEFAULT NOT @@in_transaction;
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
ROLLBACK;
CALL util.tx_rollback(vIsRequiredTx);
RESIGNAL;
END;
@ -129,6 +131,6 @@ BEGIN
VALUES(vItemFk, vNewItemFk, 1)
ON DUPLICATE KEY UPDATE counter = counter + 1;
COMMIT;
CALL util.tx_commit(vIsRequiredTx);
END$$
DELIMITER ;

View File

@ -30,11 +30,16 @@ BEGIN
st.code stateCode,
sub2.code futureStateCode,
st.classColor,
sub2.classColor futureClassColor
sub2.classColor futureClassColor,
am.id agencyFk,
am.name agency,
sub2.agencyModeFk futureAgencyFk,
sub2.agencyMode futureAgency
FROM vn.saleVolume sv
JOIN vn.sale s ON s.id = sv.saleFk
JOIN vn.item i ON i.id = s.itemFk
JOIN vn.ticket t ON t.id = sv.ticketFk
JOIN vn.agencyMode am ON am.id = t.agencyModeFk
JOIN vn.address a ON a.id = t.addressFk
JOIN vn.province p ON p.id = a.provinceFk
JOIN vn.country c ON c.id = p.countryFk
@ -54,16 +59,19 @@ BEGIN
st.name state,
st.code,
st.classColor,
am.id agencyModeFk,
am.name agencyMode,
GROUP_CONCAT(DISTINCT i.itemPackingTypeFk ORDER BY i.itemPackingTypeFk) iptd
FROM vn.ticket t
JOIN vn.ticketState ts ON ts.ticketFk = t.id
JOIN vn.state st ON st.id = ts.stateFk
JOIN vn.sale s ON s.ticketFk = t.id
JOIN vn.item i ON i.id = s.itemFk
WHERE t.shipped BETWEEN vFutureDated
AND util.dayend(vFutureDated)
AND t.warehouseFk = vWarehouseFk
GROUP BY t.id
FROM vn.ticket t
JOIN vn.agencyMode am ON am.id = t.agencyModeFk
JOIN vn.ticketState ts ON ts.ticketFk = t.id
JOIN vn.state st ON st.id = ts.stateFk
JOIN vn.sale s ON s.ticketFk = t.id
JOIN vn.item i ON i.id = s.itemFk
WHERE t.shipped BETWEEN vFutureDated
AND util.dayend(vFutureDated)
AND t.warehouseFk = vWarehouseFk
GROUP BY t.id
) sub
GROUP BY sub.addressFk
) sub2 ON sub2.addressFk = t.addressFk AND t.id != sub2.id

View File

@ -1,53 +1,109 @@
DELIMITER $$
CREATE OR REPLACE DEFINER=`vn`@`localhost` PROCEDURE `vn`.`ticket_getProblems`(
vIsTodayRelative tinyint(1)
vIsTodayRelative TINYINT(1)
)
BEGIN
/**
* Calcula los problemas para un conjunto de tickets.
* Agrupados por ticket
*
* @table tmp.sale_getProblems(ticketFk, clientFk, warehouseFk, shipped) Identificadores de los tickets a calcular
* @return tmp.ticket_problems
* @param vIsTodayRelative Indica si se calcula el disponible como si todo saliera hoy
* @table tmp.ticket(ticketFk) Identificadores de los tickets a calcular
* @return tmp.ticketProblems, tmp.saleProblems
*/
CREATE OR REPLACE TEMPORARY TABLE tmp.sale (
saleFk INT(11),
PRIMARY KEY (saleFk)
) ENGINE = MEMORY
SELECT DISTINCT s.id saleFk
FROM tmp.ticket tt
JOIN ticket t ON t.id = tt.ticketFk
JOIN sale s ON s.ticketFk = t.id
GROUP BY s.id;
CALL sale_getProblems(vIsTodayRelative);
CREATE OR REPLACE TEMPORARY TABLE tmp.ticket_problems
(PRIMARY KEY (ticketFk))
ENGINE = MEMORY
SELECT ticketFk,
MAX(isFreezed) isFreezed,
MAX(risk) risk,
MAX(hasRisk) hasRisk,
MAX(hasHighRisk) hasHighRisk,
MAX(hasTicketRequest) hasTicketRequest,
MAX(itemShortage) itemShortage,
MIN(isTaxDataChecked) isTaxDataChecked,
MAX(hasComponentLack) hasComponentLack,
MAX(isTooLittle) isTooLittle,
MAX(itemDelay) itemDelay,
MAX(hasRounding) hasRounding,
MAX(itemLost) itemLost,
MAX(isVip) isVip,
CREATE OR REPLACE TEMPORARY TABLE tmp.ticketProblems (
ticketFk INT(11),
isFreezed BOOL DEFAULT FALSE,
risk DECIMAL(10,1) DEFAULT 0,
hasRisk BOOL DEFAULT FALSE,
hasHighRisk BOOL DEFAULT FALSE,
hasTicketRequest BOOL DEFAULT FALSE,
isTaxDataChecked BOOL DEFAULT FALSE,
isTooLittle BOOL DEFAULT FALSE,
isVip BOOL DEFAULT FALSE,
hasItemShortage BOOL DEFAULT FALSE,
hasItemDelay BOOL DEFAULT FALSE,
hasItemLost BOOL DEFAULT FALSE,
hasComponentLack BOOL DEFAULT FALSE,
hasRounding BOOL DEFAULT FALSE,
PRIMARY KEY (ticketFk)
) ENGINE = MEMORY
WITH hasItemShortage AS(
SELECT s.ticketFk
FROM tmp.saleProblems sp
JOIN vn.sale s ON s.id = sp.saleFk
WHERE sp.hasItemShortage
GROUP BY s.ticketFk
),hasItemLost AS(
SELECT s.ticketFk
FROM tmp.saleProblems sp
JOIN vn.sale s ON s.id = sp.saleFk
WHERE sp.hasItemLost
GROUP BY s.ticketFk
),hasRounding AS(
SELECT s.ticketFk
FROM tmp.saleProblems sp
JOIN vn.sale s ON s.id = sp.saleFk
WHERE sp.hasRounding
GROUP BY s.ticketFk
), hasItemDelay AS(
SELECT s.ticketFk
FROM tmp.saleProblems sp
JOIN vn.sale s ON s.id = sp.saleFk
WHERE sp.hasItemDelay
GROUP BY s.ticketFk
), hasComponentLack AS(
SELECT s.ticketFk
FROM tmp.saleProblems sp
JOIN vn.sale s ON s.id = sp.saleFk
WHERE sp.hasComponentLack
GROUP BY s.ticketFk
)SELECT tt.ticketFk,
FIND_IN_SET('isFreezed', t.problem) > 0 isFreezed,
t.risk,
FIND_IN_SET('hasRisk', t.problem) > 0 hasRisk,
FIND_IN_SET('hasHighRisk', t.problem) > 0 hasHighRisk,
FIND_IN_SET('hasTicketRequest', t.problem) > 0 hasTicketRequest,
FIND_IN_SET('isTaxDataChecked', t.problem) > 0 isTaxDataChecked,
FIND_IN_SET('isTooLittle', t.problem) > 0
AND util.VN_NOW() < (util.VN_CURDATE() +
INTERVAL HOUR(zc.`hour`) HOUR) +
INTERVAL MINUTE(zc.`hour`) MINUTE isTooLittle,
c.businessTypeFk = 'VIP' isVip,
NOT (his.ticketFk IS NULL) hasItemShortage,
NOT (hid.ticketFk IS NULL) hasItemDelay,
NOT (hil.ticketFk IS NULL) hasItemLost,
NOT (hcl.ticketFk IS NULL) hasComponentLack,
NOT (hr.ticketFk IS NULL) hasRounding,
0 totalProblems
FROM tmp.sale_problems
GROUP BY ticketFk;
FROM tmp.ticket tt
JOIN vn.ticket t ON t.id = tt.ticketFk
JOIN vn.client c ON c.id = t.clientFk
LEFT JOIN hasItemShortage his ON his.ticketFk = t.id
LEFT JOIN hasItemLost hil ON hil.ticketFk = t.id
LEFT JOIN hasRounding hr ON hr.ticketFk = t.id
LEFT JOIN hasItemDelay hid ON hid.ticketFk = t.id
LEFT JOIN hasComponentLack hcl ON hcl.ticketFk = t.id
LEFT JOIN vn.zoneClosure zc ON zc.zoneFk = t.zoneFk
AND zc.dated = util.VN_CURDATE()
GROUP BY t.id;
UPDATE tmp.ticket_problems
SET totalProblems = (
(isFreezed) +
(hasHighRisk) +
(hasTicketRequest) +
(!isTaxDataChecked) +
(hasComponentLack) +
(itemDelay IS NOT NULL) +
(isTooLittle) +
(itemLost IS NOT NULL) +
(hasRounding IS NOT NULL) +
(itemShortage IS NOT NULL) +
(isVip)
);
UPDATE tmp.ticketProblems
SET totalProblems = isFreezed + hasHighRisk + hasTicketRequest +
isTaxDataChecked + hasComponentLack + hasItemDelay +
isTooLittle + hasItemLost + hasRounding + hasItemShortage + isVip;
DROP TEMPORARY TABLE tmp.sale_problems;
DROP TEMPORARY TABLE tmp.sale;
END$$
DELIMITER ;
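Note: the refactored procedure now takes tmp.ticket(ticketFk) as input and leaves tmp.ticketProblems and tmp.saleProblems for the caller to read and drop, as production_control does above. A minimal sketch of a standalone call (the ticket id is hypothetical):

    CREATE OR REPLACE TEMPORARY TABLE tmp.ticket
        ENGINE = MEMORY
        SELECT 1 ticketFk;
    CALL vn.ticket_getProblems(FALSE); -- FALSE: use each ticket's real shipped date
    SELECT * FROM tmp.ticketProblems;
    DROP TEMPORARY TABLE tmp.ticket, tmp.ticketProblems, tmp.saleProblems;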

View File

@ -0,0 +1,6 @@
INSERT IGNORE INTO salix.ACL (model,property,accessType,permission,principalType,principalId)
VALUES
('Ticket','itemLack','READ','ALLOW','ROLE','employee'),
('Ticket','itemLackDetail','READ','ALLOW','ROLE','employee'),
('Ticket','split','WRITE','ALLOW','ROLE','employee'),
('Sale','replaceItem','WRITE','ALLOW','ROLE','employee');

View File

@ -0,0 +1,2 @@
ALTER TABLE vn.ticketConfig ADD lackAlertPrice int(11) DEFAULT 30 NOT NULL COMMENT 'Value to alert when item proposal exceed price';
ALTER TABLE vn.ticketConfig ADD lackScopeDays int(11) DEFAULT 2 NOT NULL COMMENT 'Number of days to look back for ticket with negatives';
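Note: both columns ship with defaults (30 and 2), so existing rows need no backfill. A quick check after the migration, assuming the usual single-row config table:

    SELECT lackAlertPrice, lackScopeDays FROM vn.ticketConfig;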

View File

@ -0,0 +1,3 @@
-- Place your SQL code here
INSERT INTO salix.ACL (model,property,accessType,permission,principalType,principalId)
VALUES ('Ticket','getTicketProblems','READ','ALLOW','ROLE','employee');

View File

@ -0,0 +1,2 @@
-- Place your SQL code here
ALTER TABLE vn.claimConfig ADD IF NOT EXISTS daysToClaim int(11) NOT NULL DEFAULT 7 COMMENT 'Dias para reclamar';

View File

@ -0,0 +1,23 @@
CREATE TABLE vn.parkingCoordinates (
parkingFk int(11) NOT NULL,
x varchar(5) NOT NULL,
y varchar(5) NOT NULL,
z varchar(5) NOT NULL,
CONSTRAINT parkingCoordinates_pk PRIMARY KEY (parkingFk),
CONSTRAINT parkingCoordinates_parking_FK FOREIGN KEY (parkingFk) REFERENCES vn.parking(id) ON DELETE CASCADE ON UPDATE CASCADE
)
ENGINE=InnoDB
DEFAULT CHARSET=utf8mb3
COLLATE=utf8mb3_unicode_ci;
INSERT INTO vn.parkingCoordinates (parkingFk, x, y, z)
SELECT id, `column`, `row`, `floor`
FROM vn.parking
WHERE `column` IS NOT NULL
OR `row` IS NOT NULL
OR `floor` IS NOT NULL;
ALTER TABLE vn.parking
DROP COLUMN `column`,
DROP COLUMN `row`,
DROP COLUMN `floor`;
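Note: the old column/row/floor values survive as x/y/z in the new table. A minimal sketch of reading them back after the migration:

    SELECT p.code, pc.x, pc.y, pc.z
        FROM vn.parking p
        LEFT JOIN vn.parkingCoordinates pc ON pc.parkingFk = p.id;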

View File

@ -0,0 +1,41 @@
USE vn;
INSERT INTO salix.ACL (model, property, accessType, permission, principalType, principalId)
VALUES ('Vehicle', 'filter', 'READ', 'ALLOW', 'ROLE', 'administrative'),
('Vehicle', 'filter', 'READ', 'ALLOW', 'ROLE', 'deliveryAssistant'),
('Vehicle', 'find', 'READ', 'ALLOW', 'ROLE', 'administrative'),
('Vehicle', 'find', 'READ', 'ALLOW', 'ROLE', 'deliveryAssistant'),
('Vehicle', 'findById', 'READ', 'ALLOW', 'ROLE', 'administrative'),
('Vehicle', 'findById', 'READ', 'ALLOW', 'ROLE', 'deliveryAssistant'),
('Vehicle', '__get__active', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Vehicle', 'updateAttributes', 'WRITE', 'ALLOW', 'ROLE', 'administrative'),
('Vehicle', 'updateAttributes', 'WRITE', 'ALLOW', 'ROLE', 'deliveryAssistant'),
('Vehicle', 'deleteById', 'WRITE', 'ALLOW', 'ROLE', 'administrative'),
('Vehicle', 'deleteById', 'WRITE', 'ALLOW', 'ROLE', 'deliveryAssistant'),
('Vehicle', 'create', 'WRITE', 'ALLOW', 'ROLE', 'administrative'),
('Vehicle', 'create', 'WRITE', 'ALLOW', 'ROLE', 'deliveryAssistant'),
('BankPolicy', 'find', 'READ', 'ALLOW', 'ROLE', 'administrative'),
('BankPolicy', 'find', 'READ', 'ALLOW', 'ROLE', 'deliveryAssistant'),
('VehicleState', 'find', 'READ', 'ALLOW', 'ROLE', 'administrative'),
('VehicleState', 'find', 'READ', 'ALLOW', 'ROLE', 'deliveryAssistant'),
('Ppe', 'find', 'READ', 'ALLOW', 'ROLE', 'administrative' ),
('Ppe', 'find', 'READ', 'ALLOW', 'ROLE', 'deliveryAssistant' ),
('VehicleType', 'find', 'READ', 'ALLOW', 'ROLE', 'employee'),
('DeliveryPoint', 'find', 'READ', 'ALLOW', 'ROLE', 'deliveryAssistant'),
('DeliveryPoint', 'find', 'READ', 'ALLOW', 'ROLE', 'administrative');
CREATE TABLE IF NOT EXISTS vehicleType (
id INT(11) PRIMARY KEY AUTO_INCREMENT,
name VARCHAR(45) NOT NULL
);
INSERT IGNORE INTO vehicleType (id, name)
VALUES (1,'vehículo empresa'),
(2, 'furgoneta'),
(3, 'cabeza tractora'),
(4, 'remolque');
ALTER TABLE vehicle ADD COLUMN importCooler decimal(10,2) DEFAULT NULL;
ALTER TABLE vehicle ADD COLUMN vehicleTypeFk INT(11) DEFAULT 1;
ALTER TABLE vehicle ADD CONSTRAINT fk_vehicle_vehicleType FOREIGN KEY (vehicleTypeFk) REFERENCES vehicleType(id);
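Note: vehicleTypeFk defaults to 1 ('vehículo empresa'), so existing vehicles pick up that type. A minimal sketch of reading the new typing, assuming vn.vehicle keeps its usual numeric id (not shown in this migration):

    SELECT v.id, vt.name vehicleType, v.importCooler
        FROM vn.vehicle v
        LEFT JOIN vn.vehicleType vt ON vt.id = v.vehicleTypeFk;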

View File

@ -0,0 +1,90 @@
INSERT INTO salix.ACL (model,property,accessType,permission,principalType,principalId)
VALUES ('Entry','getBuyList','READ','ALLOW','ROLE','buyer'),
('Entry','getBuyUltimate','READ','ALLOW','ROLE','buyer'),
('Entry','search','READ','ALLOW','ROLE','buyer'),
('Entry','create','WRITE','ALLOW','ROLE','buyer'),
('Entry','cloneEntry','WRITE','ALLOW','ROLE','buyer'),
('Entry','deleteEntry','WRITE','ALLOW','ROLE','buyer'),
('Entry','recalcEntryPrices','WRITE','ALLOW','ROLE','buyer'),
('EntryType','find','READ','ALLOW','ROLE','buyer'),
('EntryConfig','findOne','READ','ALLOW','ROLE','buyer');
ALTER TABLE vn.ink ADD IF NOT EXISTS hexJson TEXT NOT NULL;
UPDATE vn.ink
SET hexJson = CONCAT('{"value": ["',hex,'"]}');
UPDATE vn.ink
SET hexJson = CASE `name`
WHEN 'Blanco/Naranja' THEN '{"value": ["FFFFFF", "FFA500"]}'
WHEN 'Sin especificar' THEN '{"value": ["808080"]}'
WHEN '2 Colores' THEN '{"value": ["000000", "FFFFFF"]}'
WHEN 'Amarillo/Marrón' THEN '{"value": ["FFFF00", "8B4513"]}'
WHEN 'Amarillo/Naranja' THEN '{"value": ["FFFF00", "FFA500"]}'
WHEN 'Rosa/Blanco/Amarillo' THEN '{"value": ["FFC0CB", "FFFFFF", "FFFF00"]}'
WHEN 'Rosa/Amarillo' THEN '{"value": ["FFC0CB", "FFFF00"]}'
WHEN 'Antracita' THEN '{"value": ["2F2F2F"]}'
WHEN 'Azul/Amarillo' THEN '{"value": ["0000FF", "FFFF00"]}'
WHEN 'Azul Claro' THEN '{"value": ["ADD8E6"]}'
WHEN 'Azul/Marron' THEN '{"value": ["0000FF", "8B4513"]}'
WHEN 'Azul/Verde' THEN '{"value": ["0000FF", "008000"]}'
WHEN 'Blanco/Amarillo' THEN '{"value": ["FFFFFF", "FFFF00"]}'
WHEN 'Blaugrana' THEN '{"value": ["A50044", "004D98"]}'
WHEN 'Blanco/Negro' THEN '{"value": ["FFFFFF", "000000"]}'
WHEN 'Blanco/Verde' THEN '{"value": ["FFFFFF", "008000"]}'
WHEN 'Blanco/Azul' THEN '{"value": ["FFFFFF", "0000FF"]}'
WHEN 'Blanco/Rosa' THEN '{"value": ["FFFFFF", "FFC0CB"]}'
WHEN 'Cognac/Verde' THEN '{"value": ["9A463D", "008000"]}'
WHEN 'Champagne/Verde' THEN '{"value": ["F7E7CE", "008000"]}'
WHEN 'Camuflaje' THEN '{"value": ["6B8E23", "556B2F", "8B4513"]}'
WHEN 'Crema/Rosa' THEN '{"value": ["FFFDD0", "FFC0CB"]}'
WHEN 'Fucsia/Amarillo' THEN '{"value": ["FF00FF", "FFFF00"]}'
WHEN 'Fucsia/Blanco' THEN '{"value": ["FF00FF", "FFFFFF"]}'
WHEN 'Fucsia/Crema' THEN '{"value": ["FF00FF", "FFFDD0"]}'
WHEN 'Fucsia/Rosa' THEN '{"value": ["FF00FF", "FFC0CB"]}'
WHEN 'Fucsia/Verde' THEN '{"value": ["FF00FF", "008000"]}'
WHEN 'Granate/Blanco' THEN '{"value": ["800000", "FFFFFF"]}'
WHEN 'Gris Lila' THEN '{"value": ["808080", "C8A2C8"]}'
WHEN 'Lavanda/Amarillo' THEN '{"value": ["E6E6FA", "FFFF00"]}'
WHEN 'Lavanda/Gris' THEN '{"value": ["E6E6FA", "808080"]}'
WHEN 'Lividum' THEN '{"value": ["702963"]}'
WHEN 'Morado/Amarillo' THEN '{"value": ["800080", "FFFF00"]}'
WHEN 'Marrón/Blanco' THEN '{"value": ["8B4513", "FFFFFF"]}'
WHEN 'Marron/Gris' THEN '{"value": ["8B4513", "808080"]}'
WHEN 'Marron/Negro' THEN '{"value": ["8B4513", "000000"]}'
WHEN 'Marrón/Verde' THEN '{"value": ["8B4513", "008000"]}'
WHEN 'Matizado' THEN '{"value": ["D3D3D3", "808080", "FFFFFF"]}'
WHEN 'Mixto' THEN '{"value": ["FF0000", "0000FF", "008000", "FFFF00"]}'
WHEN 'Marrón Oscuro' THEN '{"value": ["654321"]}'
WHEN 'Naranja/Marron' THEN '{"value": ["FFA500", "8B4513"]}'
WHEN 'Naranja/Rosa' THEN '{"value": ["FFA500", "FFC0CB"]}'
WHEN 'Ocre/Burgundi' THEN '{"value": ["CC7722", "800020"]}'
WHEN 'Oro/Plata' THEN '{"value": ["FFD700", "C0C0C0"]}'
WHEN 'Oro/Negro' THEN '{"value": ["FFD700", "000000"]}'
WHEN 'Oro/Verde' THEN '{"value": ["FFD700", "008000"]}'
WHEN 'Purpura/Blanco' THEN '{"value": ["800080", "FFFFFF"]}'
WHEN 'Purpura/Rosa' THEN '{"value": ["800080", "FFC0CB"]}'
WHEN 'Pastel' THEN '{"value": ["FFB6C1", "87CEFA", "98FB98"]}'
WHEN 'Plata' THEN '{"value": ["C0C0C0"]}'
WHEN 'Plata/Verde' THEN '{"value": ["C0C0C0", "008000"]}'
WHEN 'Rojo/Amarillo' THEN '{"value": ["FF0000", "FFFF00"]}'
WHEN 'Rojo/Blanco' THEN '{"value": ["FF0000", "FFFFFF"]}'
WHEN 'Rojo/Naranja' THEN '{"value": ["FF0000", "FFA500"]}'
WHEN 'Rojo/Oro' THEN '{"value": ["FF0000", "FFD700"]}'
WHEN 'Rojo/Verde' THEN '{"value": ["FF0000", "008000"]}'
WHEN 'Rosa/Lila' THEN '{"value": ["FFC0CB", "C8A2C8"]}'
WHEN 'Rosa/Naranja' THEN '{"value": ["FFC0CB", "FFA500"]}'
WHEN 'Rojo/Rosa' THEN '{"value": ["FF0000", "FFC0CB"]}'
WHEN 'Rosa empolvado' THEN '{"value": ["E6B8AF"]}'
WHEN 'Rosa/Verde' THEN '{"value": ["FFC0CB", "008000"]}'
WHEN 'Topo/Blanco' THEN '{"value": ["8B8589", "FFFFFF"]}'
WHEN 'Topo' THEN '{"value": ["8B8589"]}'
WHEN 'Transparente' THEN '{"value": ["00000000"]}'
WHEN 'Verde/Amarillo' THEN '{"value": ["008000", "FFFF00"]}'
WHEN 'Verde/Negro' THEN '{"value": ["008000", "000000"]}'
WHEN 'Variado' THEN '{"value": ["FF0000", "0000FF", "008000", "FFFF00", "FFA500"]}'
WHEN 'Verde Claro/Morado' THEN '{"value": ["90EE90", "800080"]}'
WHEN 'Verde/Lila' THEN '{"value": ["008000", "C8A2C8"]}'
WHEN 'Vaquero Neon' THEN '{"value": ["1560BD", "FFFF00"]}'
ELSE hexJson
END;
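Note: hexJson stores each colour as a JSON object with a value array of hex codes. A minimal sketch of reading it back, assuming MariaDB's JSON functions are available:

    SELECT `name`, JSON_EXTRACT(hexJson, '$.value') colors
        FROM vn.ink
        WHERE JSON_VALID(hexJson);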

View File

@ -0,0 +1,6 @@
INSERT INTO salix.ACL (model, property, accessType, permission, principalType, principalId)
VALUES
('WorkerDms', 'hasHighPrivs', 'READ', 'ALLOW', 'ROLE', 'hr'),
('Business', 'updateAttributes', 'WRITE', 'ALLOW', 'ROLE', 'hr'),
('Worker', '__get__business', 'READ', 'ALLOW', 'ROLE', 'hr')
;

View File

@ -0,0 +1,8 @@
UPDATE vn.expedition e
JOIN (
SELECT id
FROM vn.expedition
WHERE hostFk COLLATE utf8mb3_unicode_ci NOT IN
(SELECT code COLLATE utf8mb3_unicode_ci FROM vn.host WHERE code IS NOT NULL)
) s ON e.id = s.id
SET e.hostFk = 'pc336';

View File

@ -0,0 +1,9 @@
ALTER TABLE vn.expedition
MODIFY COLUMN hostFk VARCHAR(30) COLLATE utf8mb3_general_ci;
ALTER TABLE vn.expedition
ADD CONSTRAINT fk_expedition_host_code
FOREIGN KEY (hostFk)
REFERENCES host(code)
ON UPDATE CASCADE
ON DELETE CASCADE;

View File

@ -1,11 +0,0 @@
version: '3.7'
services:
front:
image: registry.verdnatura.es/salix-front:${VERSION:?}
build:
context: front
back:
image: registry.verdnatura.es/salix-back:${VERSION:?}
build:
context: .
dockerfile: back/Dockerfile

View File

@ -58,10 +58,10 @@
"Swift / BIC can't be empty": "Swift / BIC can't be empty",
"Deleted sales from ticket": "I have deleted the following lines from the ticket [{{ticketId}}]({{{ticketUrl}}}): {{{deletions}}}",
"Added sale to ticket": "I have added the following line to the ticket [{{ticketId}}]({{{ticketUrl}}}): {{{addition}}}",
"Changed sale discount": "I have changed the following lines discounts from the ticket [{{ticketId}}]({{{ticketUrl}}}): {{{changes}}}",
"Changed sale discount": "I have changed the following lines discounts from the ticket [{{ticketId}}]({{{ticketUrl}}}): {{{changes}}} {{ticketWeekly}}",
"Created claim": "I have created the claim [{{claimId}}]({{{claimUrl}}}) for the following lines from the ticket [{{ticketId}}]({{{ticketUrl}}}): {{{changes}}}",
"Changed sale price": "I have changed the price of [{{itemId}} {{concept}}]({{{itemUrl}}}) ({{quantity}}) from {{oldPrice}}€ ➔ *{{newPrice}}€* of the ticket [{{ticketId}}]({{{ticketUrl}}})",
"Changed sale quantity": "I have changed {{changes}} of the ticket [{{ticketId}}]({{{ticketUrl}}})",
"Changed sale price": "I have changed the price of [{{itemId}} {{concept}}]({{{itemUrl}}}) ({{quantity}}) from {{oldPrice}}€ ➔ *{{newPrice}}€* of the ticket [{{ticketId}}]({{{ticketUrl}}}) {{ticketWeekly}}",
"Changed sale quantity": "I have changed {{changes}} of the ticket [{{ticketId}}]({{{ticketUrl}}}) {{ticketWeekly}}",
"Changes in sales": "the quantity of [{{itemId}} {{concept}}]({{{itemUrl}}}) from {{oldQuantity}} ➔ *{{newQuantity}}*",
"Changed sale reserved state": "I have changed the following lines reserved state from the ticket [{{ticketId}}]({{{ticketUrl}}}): {{{changes}}}",
"Bought units from buy request": "Bought {{quantity}} units of [{{itemId}} {{concept}}]({{{urlItem}}}) for the ticket id [{{ticketId}}]({{{url}}})",
@ -234,6 +234,7 @@
"It has been invoiced but the PDF of refund not be generated": "It has been invoiced but the PDF of refund not be generated",
"Cannot add holidays on this day": "Cannot add holidays on this day",
"Cannot send mail": "Cannot send mail",
"This worker already exists": "This worker already exists",
"CONSTRAINT `chkParkingCodeFormat` failed for `vn`.`parking`": "CONSTRAINT `chkParkingCodeFormat` failed for `vn`.`parking`",
"This postcode already exists": "This postcode already exists",
"Original invoice not found": "Original invoice not found",
@ -253,5 +254,9 @@
"Sales already moved": "Sales already moved",
"Holidays to past days not available": "Holidays to past days not available",
"Incorrect delivery order alert on route": "Incorrect delivery order alert on route: {{ route }} zone: {{ zone }}",
"Ticket has been delivered out of order": "The ticket {{ticket}} of route {{{fullUrl}}} has been delivered out of order."
"Ticket has been delivered out of order": "The ticket {{ticket}} of route {{{fullUrl}}} has been delivered out of order.",
"clonedFromTicketWeekly": ", that is a cloned sale from ticket {{ ticketWeekly }}",
"negativeReplaced": "Replaced item [#{{oldItemId}}]({{{oldItemUrl}}}) {{oldItem}} with [#{{newItemId}}]({{{newItemUrl}}}) {{newItem}} from ticket [{{ticketId}}]({{{ticketUrl}}})",
"The tag and priority can't be repeated": "The tag and priority can't be repeated",
"duplicateWarehouse": "The introduced warehouse already exists"
}

View File

@ -22,7 +22,7 @@
"Cannot change the payment method if no salesperson": "No se puede cambiar la forma de pago si no hay comercial asignado",
"can't be blank": "El campo no puede estar vacío",
"Observation type must be unique": "El tipo de observación no puede repetirse",
"The credit must be an integer greater than or equal to zero": "The credit must be an integer greater than or equal to zero",
"The credit must be an integer greater than or equal to zero": "The credit must be an integer greater than or equal to zero",
"The grade must be similar to the last one": "El grade debe ser similar al último",
"Only manager can change the credit": "Solo el gerente puede cambiar el credito de este cliente",
"Name cannot be blank": "El nombre no puede estar en blanco",
@ -121,10 +121,10 @@
"Incoterms is required for a non UEE member": "El incoterms es requerido para los clientes extracomunitarios",
"Deleted sales from ticket": "He eliminado las siguientes lineas del ticket [{{ticketId}}]({{{ticketUrl}}}): {{{deletions}}}",
"Added sale to ticket": "He añadido la siguiente linea al ticket [{{ticketId}}]({{{ticketUrl}}}): {{{addition}}}",
"Changed sale discount": "He cambiado el descuento de las siguientes lineas al ticket [{{ticketId}}]({{{ticketUrl}}}): {{{changes}}}",
"Changed sale discount": "He cambiado el descuento de las siguientes lineas al ticket [{{ticketId}}]({{{ticketUrl}}}): {{{changes}}} {{ticketWeekly}}",
"Created claim": "He creado la reclamación [{{claimId}}]({{{claimUrl}}}) de las siguientes lineas del ticket [{{ticketId}}]({{{ticketUrl}}}): {{{changes}}}",
"Changed sale price": "He cambiado el precio de [{{itemId}} {{concept}}]({{{itemUrl}}}) ({{quantity}}) de {{oldPrice}}€ ➔ *{{newPrice}}€* del ticket [{{ticketId}}]({{{ticketUrl}}})",
"Changed sale quantity": "He cambiado {{changes}} del ticket [{{ticketId}}]({{{ticketUrl}}})",
"Changed sale price": "He cambiado el precio de [{{itemId}} {{concept}}]({{{itemUrl}}}) ({{quantity}}) de {{oldPrice}}€ ➔ *{{newPrice}}€* del ticket [{{ticketId}}]({{{ticketUrl}}}) {{ticketWeekly}} ",
"Changed sale quantity": "He cambiado {{changes}} del ticket [{{ticketId}}]({{{ticketUrl}}}) {{ticketWeekly}}",
"Changes in sales": "la cantidad de [{{itemId}} {{concept}}]({{{itemUrl}}}) de {{oldQuantity}} ➔ *{{newQuantity}}*",
"State": "Estado",
"regular": "normal",
@ -396,6 +396,8 @@
"There are tickets to be invoiced": "La zona tiene tickets por facturar",
"Incorrect delivery order alert on route": "Alerta de orden de entrega incorrecta en ruta: {{ route }} zona: {{ zone }}",
"Ticket has been delivered out of order": "El ticket {{ticket}} {{{fullUrl}}} no ha sido entregado en su orden.",
"Price cannot be blank": "El precio no puede estar en blanco"
"Price cannot be blank": "El precio no puede estar en blanco",
"clonedFromTicketWeekly": ", que es una linea clonada del ticket {{ticketWeekly}}",
"negativeReplaced": "Sustituido el articulo [#{{oldItemId}}]({{{oldItemUrl}}}) {{oldItem}} por [#{{newItemId}}]({{{newItemUrl}}}) {{newItem}} del ticket [{{ticketId}}]({{{ticketUrl}}})",
"duplicateWarehouse": "El almacén seleccionado ya existe en la zona"
}

View File

@ -368,5 +368,6 @@
"ticketLostExpedition": "Le ticket [{{ticketId}}]({{{ticketUrl}}}) a l'expédition perdue suivante : {{expeditionId}}",
"The web user's email already exists": "L'email de l'internaute existe déjà",
"Incorrect delivery order alert on route": "Alerte de bon de livraison incorrect sur l'itinéraire: {{ route }} zone : {{ zone }}",
"Ticket has been delivered out of order": "Le ticket {{ticket}} de la route {{{fullUrl}}} a été livré hors service."
"Ticket has been delivered out of order": "Le ticket {{ticket}} de la route {{{fullUrl}}} a été livré hors service.",
"negativeReplaced": "Remplacé l'article [#{{oldItemId}}]({{{oldItemUrl}}}) {{oldItem}} par [#{{newItemId}}]({{{newItemUrl}}}) {{newItem}} du ticket [{{ticketId}}]({{{ticketUrl}}})"
}

View File

@ -367,5 +367,6 @@
"ticketLostExpedition": "O ticket [{{ticketId}}]({{{ticketUrl}}}) tem a seguinte expedição perdida: {{expeditionId}}",
"The web user's email already exists": "O e-mail do utilizador da web já existe.",
"Incorrect delivery order alert on route": "Alerta de ordem de entrega incorreta na rota: {{ route }} zona: {{ zone }}",
"Ticket has been delivered out of order": "O ticket {{ticket}} da rota {{{fullUrl}}} foi entregue fora de ordem."
"Ticket has been delivered out of order": "O ticket {{ticket}} da rota {{{fullUrl}}} foi entregue fora de ordem.",
"negativeReplaced": "Substituído o artigo [#{{oldItemId}}]({{{oldItemUrl}}}) {{oldItem}} por [#{{newItemId}}]({{{newItemUrl}}}) {{newItem}} do ticket [{{ticketId}}]({{{ticketUrl}}})"
}

View File

@ -4,7 +4,7 @@ const LoopBackContext = require('loopback-context');
describe('ClaimBeginning model()', () => {
const claimFk = 1;
const activeCtx = {
accessToken: {userId: 18},
accessToken: {userId: 72},
headers: {origin: 'localhost:5000'},
__: () => {}
};

View File

@ -3,22 +3,18 @@ const LoopBackContext = require('loopback-context');
describe('Claim createFromSales()', () => {
const ticketId = 23;
const newSale = [{
id: 31,
instance: 0,
quantity: 10
}];
const activeCtx = {
accessToken: {userId: 1},
headers: {origin: 'localhost:5000'},
__: () => {}
};
const ctx = {
req: activeCtx
};
const newSale = [{id: 31, instance: 0, quantity: 10}];
let activeCtx;
let ctx;
beforeEach(() => {
activeCtx = {
accessToken: {userId: 72},
headers: {origin: 'localhost:5000'},
__: () => {}
};
ctx = {req: activeCtx};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: activeCtx
});

View File

@ -1,6 +1,7 @@
const UserError = require('vn-loopback/util/user-error');
const LoopBackContext = require('loopback-context');
const moment = require('moment');
module.exports = Self => {
require('../methods/claim-beginning/importToNewRefundTicket')(Self);
@ -13,10 +14,51 @@ module.exports = Self => {
const options = ctx.options;
const models = Self.app.models;
const saleFk = ctx?.currentInstance?.saleFk || ctx?.instance?.saleFk;
const claimFk = ctx?.instance?.claimFk || ctx?.currentInstance?.claimFk;
const myOptions = {};
const accessToken = ctx?.options?.accessToken || LoopBackContext.getCurrentContext().active.accessToken;
const ctxToken = {req: {accessToken}};
if (typeof options == 'object')
Object.assign(myOptions, options);
const sale = await models.Sale.findById(saleFk, {fields: ['ticketFk', 'quantity']}, options);
const canCreateClaimAfterDeadline = await models.ACL.checkAccessAcl(
ctxToken,
'Claim',
'createAfterDeadline',
myOptions
);
const canUpdateClaim = await models.ACL.checkAccessAcl(
ctxToken,
'Claim',
'updateClaim',
myOptions
);
if (!canUpdateClaim && !canCreateClaimAfterDeadline)
throw new UserError(`You don't have permission to modify this claim`);
if (canUpdateClaim) {
const query = `
SELECT daysToClaim
FROM vn.claimConfig`;
const res = await Self.rawSql(query);
const daysToClaim = res[0]?.daysToClaim;
const claim = await models.Claim.findById(claimFk, {fields: ['created']}, options);
const claimDate = moment.utc(claim.created);
const currentDate = moment.utc();
const daysSinceSale = currentDate.diff(claimDate, 'days');
if (daysSinceSale > daysToClaim && !canCreateClaimAfterDeadline)
throw new UserError(`You can't modify this claim because the deadline has already passed`);
}
if (ctx.isNewInstance) {
const claim = await models.Claim.findById(ctx.instance.claimFk, {fields: ['ticketFk']}, options);
const claim = await models.Claim.findById(claimFk, {fields: ['ticketFk']}, options);
if (sale.ticketFk != claim.ticketFk)
throw new UserError(`Cannot create a new claimBeginning from a different ticket`);
}
@ -41,7 +83,7 @@ module.exports = Self => {
if (ctx.options && ctx.options.transaction)
myOptions.transaction = ctx.options.transaction;
const claimBeginning = ctx.instance ?? await Self.findById(ctx.where.id);
const claimBeginning = ctx.instance ?? await Self.findById(ctx?.where?.id);
const filter = {
where: {id: claimBeginning.claimFk},

View File

@ -0,0 +1,303 @@
const ParameterizedSQL = require('loopback-connector').ParameterizedSQL;
const buildFilter = require('vn-loopback/util/filter').buildFilter;
const mergeFilters = require('vn-loopback/util/filter').mergeFilters;
module.exports = Self => {
Self.remoteMethodCtx('getBuyList', {
description: 'Returns the buys of an entry for editing',
accessType: 'READ',
accepts: [{
arg: 'entryFk',
type: 'number',
required: true,
description: 'The entry id',
http: {source: 'path'}
},
{
arg: 'filter',
type: 'object',
description: 'Filter defining where, order, offset, and limit - must be a JSON-encoded string'
},
{
arg: 'isIgnored',
type: 'boolean',
description: 'check if the buy is ignored',
http: {source: 'query'}
},
{
arg: 'itemFk',
type: 'number',
description: 'item id',
http: {source: 'query'}
},
{
arg: 'name',
type: 'string',
description: 'item name',
http: {source: 'query'}
},
{
arg: 'size',
type: 'number',
description: 'item size',
http: {source: 'query'}
},
{
arg: 'stickers',
type: 'number',
description: 'sticker quantity',
http: {source: 'query'}
},
{
arg: 'packagingFk',
type: 'number',
description: 'packaging id',
http: {source: 'query'}
},
{
arg: 'weight',
type: 'number',
description: 'weight',
http: {source: 'query'}
},
{
arg: 'packing',
type: 'number',
description: 'packing quantity',
http: {source: 'query'}
},
{
arg: 'grouping',
type: 'number',
description: 'grouping quantity',
http: {source: 'query'}
},
{
arg: 'quantity',
type: 'number',
http: {source: 'query'}
},
{
arg: 'buyingValue',
type: 'number',
http: {source: 'query'}
},
{
arg: 'amount',
type: 'number',
description: 'buying value * quantity',
http: {source: 'query'}
},
{
arg: 'price2',
type: 'number',
description: 'price for the package',
http: {source: 'query'}
},
{
arg: 'price3',
type: 'number',
description: 'price for the box',
http: {source: 'query'}
},
{
arg: 'minPrice',
type: 'number',
description: 'item minimum price',
http: {source: 'query'}
},
{
arg: 'packingOut',
type: 'number',
description: 'quantity of package on a vn box',
http: {source: 'query'}
},
{
arg: 'comment',
type: 'string',
description: 'item comment',
http: {source: 'query'}
},
{
arg: 'subName',
type: 'string',
description: 'supplier name',
http: {source: 'query'}
},
{
arg: 'company_name',
type: 'string',
description: 'company name',
http: {source: 'query'}
},
{
arg: 'workerFk',
type: 'number',
description: 'buyer id',
http: {source: 'query'}
},
{
arg: 'itemTypeFk',
type: 'number',
description: 'item family id',
http: {source: 'query'}
},
{
arg: 'groupingMode',
type: 'string',
description: 'grouping mode',
http: {source: 'query'}
},
{
arg: 'hasMinPrice',
type: 'boolean',
description: 'whether the item has a minimum price',
http: {source: 'query'}
},
{
arg: 'groupBy',
type: 'string',
description: 'group by',
http: {source: 'query'}
},
],
returns: {
type: ['object'],
root: true
},
http: {
path: `/:entryFk/getBuyList`,
verb: 'GET'
}
});
Self.getBuyList = async(ctx, entryFk, filter, options) => {
const myOptions = {};
if (typeof options == 'object')
Object.assign(myOptions, options);
let conn = Self.dataSource.connector;
let where = buildFilter(ctx.args, (param, value) => {
switch (param) {
case 'name':
case 'subName':
case 'company_name':
case 'comment':
return {[param]: {like: `%${value}%`}};
case 'size':
case 'isIgnored':
case 'itemFk':
case 'stickers':
case 'packagingFk':
case 'weight':
case 'packing':
case 'grouping':
case 'quantity':
case 'buyingValue':
case 'amount':
case 'price2':
case 'price3':
case 'packingOut':
case 'minPrice':
case 'workerFk':
case 'itemTypeFk':
case 'groupingMode':
case 'hasMinPrice':
return {[param]: value};
}
});
filter = mergeFilters(filter, {where});
let stmts = [];
let stmt;
const selectFields = `b.id,
b.isIgnored,
b.itemFk,
b.printedStickers,
b.stickers,
b.packagingFk,
b.weight,
b.packing,
b.groupingMode,
b.grouping,
b.quantity,
b.buyingValue,
ROUND(b.buyingValue * b.quantity, 2) amount,
b.isChecked,
b.price2,
b.price3,
i.name,
i.size,
i.minPrice,
i.hasMinPrice,
i.packingOut,
i.comment,
i.subName,
i.tag5,
i.value5,
i.tag6,
i.value6,
i.tag7,
i.value7,
i.tag8,
i.value8,
i.tag9,
i.value9,
i.tag10,
i.value10,
s.company_name,
ik.hexJson,
it.workerFk,
it.id itemTypeFk
`;
const groupByFields = `SUM(b.printedStickers) printedStickers,
SUM(b.packing) packing,
SUM(b.stickers) stickers,
SUM(b.weight) weight,
SUM(b.quantity) quantity,
SUM(ROUND(b.buyingValue * b.quantity, 2)) amount
`;
const groupBy = ctx.args.groupBy;
stmt = new ParameterizedSQL(
`SELECT *
FROM (
SELECT
${groupBy ? groupByFields : selectFields}
FROM item i
LEFT JOIN ink ik ON ik.id = i.inkFk
LEFT JOIN buy b ON b.itemFk = i.id
LEFT JOIN edi.ekt e ON e.id = b.ektFk
LEFT JOIN edi.supplier s ON e.pro = s.supplier_id
LEFT JOIN itemType it ON it.id = i.typeFk
WHERE b.entryFk = ?
${groupBy ?? ''}
) sub`,
[entryFk]
);
stmt.merge(conn.makeSuffix(filter));
let itemsIndex = stmts.push(stmt) - 1;
let sql = ParameterizedSQL.join(stmts, ';');
let result = await conn.executeStmt(sql, myOptions);
if (groupBy && result.length) {
const buys = await Self.app.models.Buy.find({where: {entryFk}}, myOptions);
const buysChecked = buys.filter(buy => buy?.isChecked);
result[0].isChecked = buysChecked.length === buys.length;
}
return itemsIndex === 0 ? result : result[itemsIndex];
};
};
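
A minimal sketch of how this endpoint might be consumed from a client, assuming the method is mounted on the Entry model (REST base /api/Entries) and that axios is used for the request; the query-string arguments map onto the buildFilter() cases above.

const axios = require('axios');

// Hypothetical helper: fetch the editable buy list of an entry.
async function fetchBuyList(entryFk, token) {
    const filter = {limit: 20, order: 'itemFk'};
    const {data} = await axios.get(`/api/Entries/${entryFk}/getBuyList`, {
        headers: {Authorization: token},
        params: {
            filter: JSON.stringify(filter), // where/order/limit, merged with the args below
            name: 'rose',                   // string args become LIKE '%value%' conditions
            isIgnored: false                // scalar args become exact-match conditions
        }
    });
    return data; // buy rows, or aggregated totals when groupBy is passed
}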

View File

@ -0,0 +1,46 @@
module.exports = Self => {
Self.remoteMethodCtx('getBuyUltimate', {
description: 'Returns the last buy of the item',
accessType: 'READ',
accepts: [
{
arg: 'itemFk',
type: 'number',
required: true
}, {
arg: 'warehouseFk',
type: 'number',
required: true
}, {
arg: 'date',
type: 'date',
required: true
}
],
returns: {
type: 'object',
root: true
},
http: {
path: `/getBuyUltimate`,
verb: 'GET'
}
});
Self.getBuyUltimate = async(ctx, itemFk, warehouseFk, date, options) => {
const myOptions = {};
if (typeof options == 'object')
Object.assign(myOptions, options);
await Self.rawSql('CALL vn.buy_getUltimate(?, ?, ?)', [itemFk, warehouseFk, date], myOptions);
return Self.rawSql(
`SELECT b.*
FROM cache.last_buy lb
JOIN buy b ON b.id = lb.buy_id
WHERE lb.item_id = ?
ORDER BY (lb.warehouse_id = ?) desc
LIMIT 1`,
[itemFk, warehouseFk], myOptions
);
};
};
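
A hedged server-side sketch of calling the method above, assuming it is registered on the Entry model and that Date.vnNew() is available as used elsewhere in the codebase; the returned row comes from cache.last_buy after vn.buy_getUltimate() refreshes it.

const {models} = require('vn-loopback/server/server');

// Hypothetical usage: get the latest buy of an item, preferring the given warehouse.
async function lastBuyFor(itemFk, warehouseFk) {
    const ctx = {req: {accessToken: {userId: 9}}}; // any authenticated user id
    const [buy] = await models.Entry.getBuyUltimate(ctx, itemFk, warehouseFk, Date.vnNew());
    return buy;
}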

View File

@ -0,0 +1,46 @@
module.exports = Self => {
Self.remoteMethodCtx('cloneEntry', {
description: 'Clones an entry',
accessType: 'WRITE',
accepts: [{
arg: 'id',
type: 'number',
required: true,
description: 'The entry id',
http: {source: 'path'}
}],
returns: {
type: 'object',
root: true
},
http: {
path: `/:id/cloneEntry`,
verb: 'POST'
}
});
Self.cloneEntry = async(ctx, id, options) => {
const userId = ctx.req.accessToken.userId;
const myOptions = {userId};
let tx;
if (typeof options == 'object')
Object.assign(myOptions, options);
if (!myOptions.transaction) {
tx = await Self.beginTransaction({});
myOptions.transaction = tx;
}
try {
await Self.rawSql('CALL entry_clone(?, @newEntryId)', [id], myOptions);
const result = await Self.rawSql('SELECT @newEntryId', [], myOptions);
const newEntryId = result[0]['@newEntryId'];
if (tx) await tx.commit();
return newEntryId;
} catch (e) {
if (tx) await tx.rollback();
throw e;
}
};
};
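
A sketch of how cloneEntry could be exercised from a spec, mirroring the transaction-and-rollback pattern used by the other tests in this changeset; the entry id and user id are hypothetical fixture values.

const {models} = require('vn-loopback/server/server');

describe('entry cloneEntry()', () => {
    it('should return the id of the cloned entry', async() => {
        const tx = await models.Entry.beginTransaction({});
        const options = {transaction: tx};
        const ctx = {req: {accessToken: {userId: 9}}};
        try {
            const newEntryId = await models.Entry.cloneEntry(ctx, 1, options);

            expect(newEntryId).toBeDefined();

            await tx.rollback();
        } catch (e) {
            await tx.rollback();
            throw e;
        }
    });
});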

View File

@ -0,0 +1,48 @@
module.exports = Self => {
Self.remoteMethodCtx('deleteEntry', {
description: 'Deletes an entry',
accessType: 'WRITE',
accepts: [{
arg: 'id',
type: 'number',
required: true,
description: 'The entry id',
http: {source: 'path'}
}],
http: {
path: `/:id/deleteEntry`,
verb: 'POST'
}
});
Self.deleteEntry = async(ctx, id, options) => {
const userId = ctx.req.accessToken.userId;
const myOptions = {userId};
let tx;
if (typeof options == 'object')
Object.assign(myOptions, options);
if (!myOptions.transaction) {
tx = await Self.beginTransaction({});
myOptions.transaction = tx;
}
try {
const entry = await Self.findById(id, null, myOptions);
await entry.updateAttribute('travelFk', null, myOptions);
await Self.rawSql('DELETE FROM vn.duaEntry WHERE entryFk = ?;', [id], myOptions);
await Self.rawSql(`
DELETE i.*
FROM vn.invoiceIn i
JOIN vn.entry e ON e.invoiceInFk = i.id
WHERE e.id = ?`, [id], myOptions
);
if (tx) await tx.commit();
} catch (e) {
if (tx) await tx.rollback();
throw e;
}
};
};

View File

@ -129,7 +129,68 @@ module.exports = Self => {
arg: 'finalTemperature',
type: 'number',
description: 'Final temperature value'
}
},
{
arg: 'isExcludedFromAvailable',
type: 'boolean',
description: `whether the entry is excluded from available`
},
{
arg: 'isReceived',
type: 'boolean',
description: `travel received`
},
{
arg: 'isRaid',
type: 'boolean',
description: `travel isRaid`
},
{
arg: 'landed',
type: 'date',
description: `landing date`
},
{
arg: 'invoiceNumber',
type: 'string',
description: `entry invoice`
},
{
arg: 'reference',
type: 'string',
description: `entry reference`
},
{
arg: 'awbCode',
type: 'string',
description: `awb code`
},
{
arg: 'agencyModeId',
type: 'number',
description: `agency mode id`
},
{
arg: 'evaNotes',
type: 'string',
description: `observation`
},
{
arg: 'warehouseInFk',
type: 'number',
description: `warehouse in id`
},
{
arg: 'warehouseOutFk',
type: 'number',
description: `warehouse out id`
},
{
arg: 'entryTypeCode',
type: 'string',
description: 'entry type code'
},
],
returns: {
type: ['object'],
@ -156,19 +217,12 @@ module.exports = Self => {
{'s.name': {like: `%${value}%`}},
{'s.nickname': {like: `%${value}%`}}
]};
case 'invoiceNumber':
case 'reference':
case 'ref':
case 'evaNotes':
param = `e.${param}`;
return {[param]: {like: `%${value}%`}};
case 'created':
return {'e.created': {gte: value}};
case 'from':
return {'t.landed': {gte: value}};
case 'fromShipped':
return {'t.shipped': {gte: value}};
case 'to':
return {'t.landed': {lte: value}};
case 'toShipped':
return {'t.shipped': {lte: value}};
case 'id':
case 'isBooked':
case 'isConfirmed':
@ -178,8 +232,20 @@ module.exports = Self => {
case 'currencyFk':
case 'supplierFk':
case 'invoiceInFk':
param = `e.${param}`;
return {[param]: value};
case 'isExcludedFromAvailable':
return {[`e.${param}`]: value};
case 'isReceived':
case 'landed':
case 'isRaid':
case 'warehouseInFk':
case 'warehouseOutFk':
return {[`t.${param}`]: value};
case 'awbCode':
return {'a.code': {like: `%${value}%`}};
case 'agencyModeId':
return {[`am.id`]: value};
case 'entryTypeCode':
return {[`et.code`]: value};
case 'initialTemperature':
return {'e.initialTemperature': {lte: value}};
case 'finalTemperature':
@ -197,15 +263,14 @@ module.exports = Self => {
const stmts = [];
let stmt;
stmt = new ParameterizedSQL(
`SELECT
e.id,
`SELECT e.id,
e.supplierFk,
e.dated,
e.reference,
e.invoiceNumber,
e.isBooked,
e.isExcludedFromAvailable,
e.evaNotes observation,
e.evaNotes,
e.isConfirmed,
e.isOrdered,
t.isRaid,
@ -227,15 +292,27 @@ module.exports = Self => {
cu.code currencyCode,
t.shipped,
t.landed,
t.ref AS travelRef,
t.ref travelRef,
t.warehouseInFk,
w.name warehouseInName
w.name warehouseInName,
t.warehouseOutFk,
w2.name warehouseOutName,
a.code awbCode,
am.id agencyModeId,
am.name agencyModeName,
et.code entryTypeCode,
et.description entryTypeDescription,
t.isReceived
FROM vn.entry e
JOIN vn.supplier s ON s.id = e.supplierFk
JOIN vn.travel t ON t.id = e.travelFk
JOIN vn.warehouse w ON w.id = t.warehouseInFk
JOIN vn.company co ON co.id = e.companyFk
JOIN vn.currency cu ON cu.id = e.currencyFk`
LEFT JOIN vn.travel t ON t.id = e.travelFk
LEFT JOIN vn.warehouse w ON w.id = t.warehouseInFk
LEFT JOIN vn.warehouse w2 ON w2.id = t.warehouseOutFk
LEFT JOIN vn.company co ON co.id = e.companyFk
LEFT JOIN vn.currency cu ON cu.id = e.currencyFk
LEFT JOIN vn.awb a ON a.id = t.awbFk
LEFT JOIN vn.agencyMode am ON am.id = t.agencyModeFk
LEFT JOIN vn.entryType et ON et.code = e.typeFk`
);
stmt.merge(conn.makeWhere(filter.where));

View File

@ -0,0 +1,49 @@
module.exports = Self => {
Self.remoteMethodCtx('recalcEntryPrices', {
description: 'Recalculates the buy prices of an entry',
accessType: 'WRITE',
accepts: [{
arg: 'entryFk',
type: 'number',
required: true,
description: 'The entry id',
http: {source: 'path'}
}],
returns: {
type: 'object',
root: true
},
http: {
path: `/:entryFk/recalcEntryPrices`,
verb: 'POST'
}
});
Self.recalcEntryPrices = async(ctx, entryFk, options) => {
const userId = ctx.req.accessToken.userId;
const myOptions = {userId};
let tx;
if (typeof options == 'object')
Object.assign(myOptions, options);
if (!myOptions.transaction) {
tx = await Self.beginTransaction({});
myOptions.transaction = tx;
}
const entry = await Self.findById(entryFk, null, myOptions);
const entryConfig = await Self.app.models.EntryConfig.findOne({}, myOptions);
if (entry.supplierFk === entryConfig.inventorySupplierFk) {
if (tx) await tx.rollback();
return;
}
try {
const result = await Self.rawSql('CALL vn.buy_recalcPricesByEntry(?)', [entryFk], myOptions);
if (tx) await tx.commit();
return result[0];
} catch (e) {
if (tx) await tx.rollback();
throw e;
}
};
};

View File

@ -45,7 +45,7 @@ module.exports = Self => {
{
relation: 'user',
scope: {
fields: ['id', 'name']
fields: ['id', 'nickname']
}
}
]

View File

@ -31,5 +31,8 @@
},
"InventoryConfig": {
"dataSource": "vn"
},
"EntryConfig": {
"dataSource": "vn"
}
}

View File

@ -0,0 +1,30 @@
{
"name": "EntryConfig",
"base": "VnModel",
"mixins": {
"Loggable": true
},
"options": {
"mysql": {
"table": "entryConfig"
}
},
"properties": {
"defaultEntry": {
"type": "number",
"id": true
},
"mailToNotify": {
"type": "string"
},
"inventorySupplierFk": {
"type": "number"
},
"maxLockTime": {
"type": "number"
},
"defaultSupplierFk": {
"type": "number"
}
}
}

View File

@ -15,8 +15,13 @@ module.exports = Self => {
require('../methods/entry/transfer')(Self);
require('../methods/entry/labelSupplier')(Self);
require('../methods/entry/buyLabelSupplier')(Self);
require('../methods/entry-buys/getBuyList')(Self);
require('../methods/entry-buys/getBuyUltimate')(Self);
require('../methods/entry/cloneEntry')(Self);
require('../methods/entry/deleteEntry')(Self);
require('../methods/entry/recalcEntryPrices')(Self);
Self.observe('before save', async function(ctx, options) {
Self.observe('before save', async(ctx, options) => {
if (ctx.isNewInstance) return;
const changes = ctx.data || ctx.instance;

View File

@ -56,8 +56,7 @@
"required": true
},
"travelFk": {
"type": "number",
"required": true
"type": "number"
},
"companyFk": {
"type": "number",
@ -74,6 +73,12 @@
},
"finalTemperature": {
"type": "number"
},
"lockerUserFk":{
"type": "number"
},
"locked":{
"type": "date"
}
},
"relations": {
@ -107,6 +112,16 @@
"type": "belongsTo",
"model": "EntryType",
"foreignKey": "typeFk"
}
},
"invoiceIn": {
"type": "belongsTo",
"model": "InvoiceIn",
"foreignKey": "invoiceInFk"
},
"user": {
"type": "belongsTo",
"model": "VnUser",
"foreignKey": "lockerUserFk"
}
}
}

View File

@ -48,12 +48,10 @@ module.exports = Self => {
}, myOptions);
let asien = bookEntry?.ASIEN;
const invoiceIn = await Self.findById(invoiceInId, myOptions);
if (asien) {
accountingEntries = await models.Xdiario.count({ASIEN: asien}, myOptions);
await models.Xdiario.destroyAll({ASIEN: asien}, myOptions);
const invoiceIn = await Self.findById(invoiceInId, myOptions);
await invoiceIn.updateAttribute('isBooked', false, myOptions);
} else {
const linkedBookEntry = await models.Xdiario.findOne({
fields: ['ASIEN'],
@ -66,6 +64,8 @@ module.exports = Self => {
asien = linkedBookEntry?.ASIEN;
isLinked = true;
}
await invoiceIn.updateAttribute('isBooked', false, myOptions);
if (tx) await tx.commit();
return {

View File

@ -22,6 +22,11 @@ module.exports = Self => {
type: 'integer',
description: 'The item id',
},
{
arg: 'name',
type: 'string',
description: 'The item name',
},
{
arg: 'typeFk',
type: 'integer',
@ -112,6 +117,8 @@ module.exports = Self => {
: {'it.code': {like: `%${value}%`}};
case 'categoryFk':
return {'it.categoryFk': value};
case 'name':
return {'i.name': {like: `%${value}%`}};
case 'buyerFk':
return {'it.workerFk': value};
case 'warehouseFk':

View File

@ -0,0 +1,43 @@
module.exports = Self => {
Self.remoteMethodCtx('getSimilar', {
description: 'Returns a list of items similar to the requested item',
accessType: 'READ',
accepts: [
{
arg: 'filter',
type: 'Object',
required: true,
description: 'Filter defining where and paginated data',
http: {source: 'query'}
}
],
returns: {
type: ['Object'],
root: true
},
http: {
path: `/getSimilar`,
verb: 'GET'
}
});
Self.getSimilar = async(ctx, filter, options) => {
const myOptions = {userId: ctx.req.accessToken.userId};
if (typeof options == 'object')
Object.assign(myOptions, options);
const {where} = filter;
const query = [
filter.itemFk,
where.warehouseFk,
where.date,
where.showType,
where.scopeDays
];
const [results] = await Self.rawSql('CALL vn.item_getSimilar(?, ?, ?, ?, ?)', query, myOptions);
return results;
};
};

View File

@ -0,0 +1,38 @@
const ParameterizedSQL = require('loopback-connector').ParameterizedSQL;
module.exports = Self => {
Self.remoteMethodCtx('search', {
description: 'Returns an array of search results for a specified item',
accepts: [{
arg: 'filter',
type: 'object',
description: 'Filter to define conditions and paginate the data.',
required: true
}],
returns: {
type: ['object'],
root: true
},
http: {
path: `/search`,
verb: 'GET'
}
});
Self.search = async(ctx, filter) => {
const conn = Self.dataSource.connector;
const stmt = new ParameterizedSQL(`
SELECT *
FROM (
SELECT i.id, i.name, i.size, p.name producerName
FROM item i
LEFT JOIN producer p ON p.id = i.producerFk
) sub
`);
stmt.merge(conn.makeSuffix(filter));
return conn.executeStmt(stmt);
};
};
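
A minimal sketch of the filter this method expects, assuming it is mounted on the Item model; makeSuffix() turns the where/order/limit keys into the trailing SQL of the outer query, so only columns exposed by the subquery (id, name, size, producerName) can be filtered.

const {models} = require('vn-loopback/server/server');

// Hypothetical usage: search items by a name fragment.
async function searchItems(text) {
    const filter = {
        where: {name: {like: `%${text}%`}},
        order: 'name',
        limit: 30
    };
    return models.Item.search(null, filter); // ctx is not used by the implementation
}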

View File

@ -89,7 +89,7 @@ describe('item filter()', () => {
const ctx = {args: {filter: filter, workerFk: 16}, req: {accessToken: {userId: 1}}};
const result = await models.Item.filter(ctx, filter, options);
expect(result.length).toEqual(2);
expect(result.length).toEqual(3);
expect(result[0].id).toEqual(16);
expect(result[1].id).toEqual(71);

View File

@ -0,0 +1,49 @@
const models = require('vn-loopback/server/server').models;
describe('Item get similar', () => {
let options;
let tx;
const ctx = beforeAll.getCtx();
beforeAll.mockLoopBackContext();
beforeEach(async() => {
tx = await models.Item.beginTransaction({});
options = {transaction: tx};
});
afterEach(async() => {
if (tx)
await tx.rollback();
});
it('should return similar items', async() => {
const filter = {
itemFk: 88, sales: 43,
where: {
'scopeDays': '2',
'showType': true,
'alertLevelCode': 'FREE',
'date': '2001-01-01T11:00:00.000Z',
'warehouseFk': 1
}
};
const result = await models.Item.getSimilar(ctx, filter, options);
expect(result.length).toEqual(2);
});
it('should return an empty array if no similar items exist', async() => {
const filter = {
itemFk: 88, sales: 43,
where: {
'scopeDays': '2',
'showType': true,
'alertLevelCode': 'FREE',
'date': '2001-01-01T11:00:00.000Z',
'warehouseFk': 60
}
};
const result = await models.Item.getSimilar(ctx, filter, options);
expect(result.length).toEqual(0);
});
});

View File

@ -26,7 +26,7 @@ describe('tag filterValue()', () => {
const filter = {where: {value: 'Blue'}, limit: 5};
const result = await models.Tag.filterValue(colorTagId, filter, options);
expect(result.length).toEqual(2);
expect(result.length).toEqual(3);
expect(result[0].value).toEqual('Blue');
expect(result[1].value).toEqual('Blue/Silver');

View File

@ -17,6 +17,9 @@
},
"showOrder": {
"type": "number"
},
"hexJson": {
"type": "string"
}
},
"acls": [

View File

@ -5,6 +5,7 @@ module.exports = Self => {
require('../methods/item/clone')(Self);
require('../methods/item/updateTaxes')(Self);
require('../methods/item/getBalance')(Self);
require('../methods/item/getSimilar')(Self);
require('../methods/item/lastEntriesFilter')(Self);
require('../methods/item/getSummary')(Self);
require('../methods/item/getCard')(Self);
@ -17,6 +18,7 @@ module.exports = Self => {
require('../methods/item/buyerWasteEmail')(Self);
require('../methods/item/setVisibleDiscard')(Self);
require('../methods/item/get')(Self);
require('../methods/item/search')(Self);
Self.validatesPresenceOf('originFk', {message: 'Cannot be blank'});

View File

@ -258,10 +258,10 @@ module.exports = Self => {
stmts.push(`SET SESSION optimizer_search_depth = @_optimizer_search_depth`);
stmt = new ParameterizedSQL(`
CREATE OR REPLACE TEMPORARY TABLE tmp.sale_getProblems
CREATE OR REPLACE TEMPORARY TABLE tmp.ticket
(INDEX (ticketFk))
ENGINE = MEMORY
SELECT f.id ticketFk, f.clientFk, f.warehouseFk, f.shipped
SELECT f.id ticketFk
FROM tmp.filter f
LEFT JOIN alertLevel al ON al.id = f.alertLevel
WHERE (al.code = 'FREE' OR f.alertLevel IS NULL)
@ -282,7 +282,7 @@ module.exports = Self => {
stmts.push('CALL ticket_getWarnings()');
stmt = new ParameterizedSQL(`
UPDATE tmp.ticket_problems
UPDATE tmp.ticketProblems
SET risk = IF(hasRisk, risk, 0)
`);
stmts.push(stmt);
@ -290,7 +290,7 @@ module.exports = Self => {
stmt = new ParameterizedSQL(`
SELECT *
FROM tmp.filter f
LEFT JOIN tmp.ticket_problems tp ON tp.ticketFk = f.id
LEFT JOIN tmp.ticketProblems tp ON tp.ticketFk = f.id
LEFT JOIN tmp.ticket_warnings tw ON tw.ticketFk = f.id
`);
stmts.push(stmt);
@ -307,8 +307,8 @@ module.exports = Self => {
{'tp.hasRisk': true},
{'tp.hasTicketRequest': true},
{'tp.hasComponentLack': true},
{'tp.isTaxDataChecked': false},
{'tp.itemShortage': {neq: null}},
{'tp.isTaxDataChecked': true},
{'tp.hasItemShortage': true},
{'tp.isTooLittle': true}
]};
} else if (hasProblems === false) {
@ -317,8 +317,8 @@ module.exports = Self => {
{'tp.hasRisk': false},
{'tp.hasTicketRequest': false},
{'tp.hasComponentLack': false},
{'tp.isTaxDataChecked': true},
{'tp.itemShortage': null},
{'tp.isTaxDataChecked': false},
{'tp.hasItemShortage': false},
{'tp.isTooLittle': false}
]};
}
@ -392,9 +392,9 @@ module.exports = Self => {
stmts.push(`
DROP TEMPORARY TABLE
tmp.ticket,
tmp.filter,
tmp.ticket_problems,
tmp.sale_getProblems,
tmp.ticketProblems,
tmp.sale_getWarnings,
tmp.ticket_warnings
`);

View File

@ -68,7 +68,7 @@ describe('SalesMonitor salesFilter()', () => {
const filter = {};
const result = await models.SalesMonitor.salesFilter(ctx, filter, options);
expect(result.length).toEqual(4);
expect(result.length).toEqual(5);
await tx.rollback();
} catch (e) {

View File

@ -31,9 +31,9 @@ describe('route getSuggestedTickets()', () => {
const length = result.length;
const anyResult = result[Math.floor(Math.random() * Math.floor(length))];
expect(result.length).toEqual(4);
expect(result.length).toEqual(5);
expect(anyResult.zoneFk).toEqual(1);
expect(anyResult.agencyModeFk).toEqual(8);
expect([1, 8]).toContain(anyResult.agencyModeFk);
await tx.rollback();
} catch (e) {

View File

@ -14,7 +14,7 @@ describe('route unlink()', () => {
let tickets = await models.Route.getSuggestedTickets(routeId, options);
expect(zoneAgencyModes.length).toEqual(4);
expect(tickets.length).toEqual(3);
expect(tickets.length).toEqual(4);
await models.Route.unlink(agencyModeId, zoneId, options);

View File

@ -0,0 +1,128 @@
const {ParameterizedSQL} = require('loopback-connector');
const {buildFilter, mergeFilters} = require('vn-loopback/util/filter');
module.exports = Self => {
Self.remoteMethodCtx('filter', {
description: 'Find all instances of the model matched by filter from the data source.',
accessType: 'READ',
accepts: [{
arg: 'filter',
type: 'object',
description: 'Filter defining where, order, skip and limit - must be a JSON-encoded string',
http: {source: 'query'}
}, {
arg: 'search',
type: 'string',
description: 'Searches the vehicle by id or numberPlate',
http: {source: 'query'}
}, {
arg: 'id',
type: 'number'
}, {
arg: 'description',
type: 'string'
}, {
arg: 'companyFk',
type: 'number'
}, {
arg: 'tradeMark',
type: 'string'
}, {
arg: 'numberPlate',
type: 'string'
}, {
arg: 'warehouseFk',
type: 'number'
}, {
arg: 'chassis',
type: 'string'
}, {
arg: 'leasing',
type: 'string'
}, {
arg: 'countryCodeFk',
type: 'string'
}, {
arg: 'vehicleTypeFk',
type: 'number'
}, {
arg: 'vehicleStateFk',
type: 'number'
}],
returns: {
type: ['object'],
root: true
},
http: {
path: `/filter`,
verb: `GET`
}
});
Self.filter = async(ctx, filter, options) => {
const conn = Self.dataSource.connector;
const myOptions = {};
if (typeof options == 'object') Object.assign(myOptions, options);
const where = buildFilter(ctx.args, (param, value) => {
switch (param) {
case 'search':
return {or: [{'v.id': value}, {numberPlate: {like: `%${value}%`}}]};
case 'id':
return {'v.id': value};
case 'description':
case 'tradeMark':
case 'numberPlate':
case 'chassis':
case 'leasing':
return {[param]: {like: `%${value}%`}};
case 'companyFk':
case 'warehouseFk':
case 'countryCodeFk':
case 'vehicleStateFk':
case 'vehicleTypeFk':
return {[param]: value};
}
});
filter = mergeFilters(filter, {where});
const stmt = new ParameterizedSQL(`
SELECT v.id,
v.numberPlate,
v.tradeMark,
v.model,
v.m3,
v.description,
v.isActive,
v.countryCodeFk,
v.chassis,
v.leasing,
vt.name type,
w.name warehouse,
c.code company,
sub.state
FROM vehicle v
JOIN vehicleType vt ON vt.id = v.vehicleTypeFk
LEFT JOIN warehouse w ON w.id = v.warehouseFk
LEFT JOIN company c ON c.id = v.companyFk
LEFT JOIN (
SELECT e.vehicleFk,
e.vehicleStateFk,
s.state,
ROW_NUMBER() OVER (PARTITION BY e.vehicleFk ORDER BY e.started DESC) rn
FROM vehicleEvent e
LEFT JOIN vehicleState s ON e.vehicleStateFk = s.id
) sub ON sub.vehicleFk = v.id AND sub.rn = 1
`);
const sqlWhere = conn.makeWhere(filter.where);
stmt.merge(sqlWhere);
stmt.merge(conn.makePagination(filter));
const sql = ParameterizedSQL.join([stmt], ';');
return conn.executeStmt(sql, myOptions);
};
};

View File

@ -0,0 +1,127 @@
const {models} = require('vn-loopback/server/server');
describe('Vehicle filter()', () => {
const deliveryAssiId = 123;
const ctx = beforeAll.getCtx(deliveryAssiId);
let options;
let tx;
beforeEach(async() => {
ctx.args = {};
options = {};
tx = await models.Sale.beginTransaction({});
options.transaction = tx;
});
afterEach(async() => {
await tx.rollback();
});
it('should return the vehicles matching "search"', async() => {
const {id} = await models.Vehicle.findById(1, null, options);
const {numberPlate} = await models.Vehicle.findById(2, null, options);
ctx.args = {search: id};
const [searchResult] = await models.Vehicle.filter(ctx);
ctx.args = {search: numberPlate};
const [searchResult2] = await models.Vehicle.filter(ctx);
expect(searchResult.id).toEqual(id);
expect(searchResult2.numberPlate).toEqual(numberPlate);
});
it('should return the vehicles matching "companyFk"', async() => {
const company = await models.Company.findOne({where: {code: 'VNL'}}, options);
ctx.args = {companyFk: company.id};
const searchResult = await models.Vehicle.filter(ctx, null, options);
searchResult.forEach(record => {
expect(record.company).toEqual(company.code);
});
});
it('should return the vehicles matching "tradeMark"', async() => {
const tradeMark = 'WAYNE INDUSTRIES';
ctx.args = {tradeMark};
const searchResult = await models.Vehicle.filter(ctx);
searchResult.forEach(record => {
expect(record.tradeMark).toEqual(tradeMark);
});
});
it('should return the vehicles matching "numberPlate"', async() => {
const {numberPlate} = await models.Vehicle.findById(1, null, options);
ctx.args = {numberPlate};
const searchResult = await models.Vehicle.filter(ctx);
searchResult.forEach(record => {
expect(record.numberPlate).toEqual(numberPlate);
});
});
it('should return the vehicles matching "warehouseFk"', async() => {
const warehouse = await models.Warehouse.findById(1, null, options);
ctx.args = {warehouseFk: warehouse.id};
const searchResult = await models.Vehicle.filter(ctx);
searchResult.forEach(record => {
expect(record.warehouse).toEqual(warehouse.name);
});
});
it('should return the vehicles matching "chassis"', async() => {
const {chassis} = await models.Vehicle.findById(1, null, options);
ctx.args = {chassis};
const [searchResult] = await models.Vehicle.filter(ctx);
expect(searchResult.chassis).toEqual(chassis);
});
it('should return the vehicles matching "leasing"', async() => {
const leasing = 'Wayne leasing';
ctx.args = {leasing};
const searchResult = await models.Vehicle.filter(ctx);
searchResult.forEach(record => {
expect(record.leasing).toEqual(leasing);
});
});
it('should return the vehicles matching "countryCodeFk"', async() => {
const countryCodeFk = 'ES';
ctx.args = {countryCodeFk};
const searchResult = await models.Vehicle.filter(ctx);
searchResult.forEach(record => {
expect(record.countryCodeFk).toEqual(countryCodeFk);
});
});
it('should return the vehicles matching "vehicleTypeFk"', async() => {
const {name, id} = await models.VehicleType.findById(1, null, options);
ctx.args = {vehicleTypeFk: id};
const searchResult = await models.Vehicle.filter(ctx);
searchResult.forEach(record => {
expect(record.type).toEqual(name);
});
});
it('should return the vehicles matching "vehicleStateFk"', async() => {
const {state, id} = await models.VehicleState.findById(3);
ctx.args = {vehicleStateFk: id};
const searchResult = await models.Vehicle.filter(ctx);
searchResult.forEach(record => {
expect(record.state).toEqual(state);
});
});
it('should return the vehicles matching "description"', async() => {
const {description} = await models.Vehicle.findById(2);
ctx.args = {description};
const searchResult = await models.Vehicle.filter(ctx);
searchResult.forEach(record => {
expect(record.description).toEqual(description);
});
});
});

View File

@ -5,12 +5,21 @@
"AgencyTermConfig": {
"dataSource": "vn"
},
"BankPolicy": {
"dataSource": "vn"
},
"Cmr": {
"dataSource": "vn"
},
"DeliveryPoint": {
"dataSource": "vn"
},
"FuelType": {
"dataSource": "vn"
},
"Ppe": {
"dataSource": "vn"
},
"RoadmapAddress": {
"dataSource": "vn"
},
@ -35,6 +44,12 @@
"Vehicle": {
"dataSource": "vn"
},
"VehicleState": {
"dataSource": "vn"
},
"VehicleType": {
"dataSource": "vn"
},
"RoutesMonitor": {
"dataSource": "vn"
}

View File

@ -0,0 +1,21 @@
{
"name": "BankPolicy",
"base": "VnModel",
"options": {
"mysql": {
"table": "bankPolicy"
}
},
"properties": {
"id": {
"type": "number",
"id": true
},
"ref": {
"type": "string"
},
"dmsFk": {
"type": "number"
}
}
}

View File

@ -0,0 +1,30 @@
{
"name": "FuelType",
"base": "VnModel",
"options": {
"mysql": {
"table": "fuelType"
}
},
"properties": {
"id": {
"type": "number",
"id": true,
"description": "Identifier"
},
"name": {
"type": "string"
},
"code": {
"type": "string"
}
},
"acls": [
{
"accessType": "READ",
"principalType": "ROLE",
"principalId": "$everyone",
"permission": "ALLOW"
}
]
}

View File

@ -0,0 +1,15 @@
{
"name": "Ppe",
"base": "VnModel",
"options": {
"mysql": {
"table": "ppe"
}
},
"properties": {
"id": {
"type": "number",
"id": true
}
}
}

View File

@ -0,0 +1,21 @@
{
"name": "VehicleState",
"base": "VnModel",
"options": {
"mysql": {
"table": "vehicleState"
}
},
"properties": {
"id": {
"type": "number",
"id": true
},
"state": {
"type": "string"
},
"hasToNotify": {
"type": "number"
}
}
}

View File

@ -0,0 +1,19 @@
{
"name": "VehicleType",
"base": "VnModel",
"options": {
"mysql": {
"table": "vehicleType"
}
},
"properties": {
"id": {
"type": "number",
"id": true,
"description": "Identifier"
},
"name": {
"type": "string"
}
}
}

View File

@ -1,3 +1,4 @@
module.exports = Self => {
require('../methods/vehicle/sorted')(Self);
require('../methods/vehicle/filter')(Self);
};

View File

@ -3,7 +3,7 @@
"base": "VnModel",
"options": {
"mysql": {
"table": "vehicle"
"table": "vehicle"
}
},
"properties": {
@ -29,6 +29,39 @@
},
"isActive": {
"type": "number"
},
"countryCodeFk": {
"type": "string"
},
"chassis": {
"type": "string"
},
"leasing": {
"type": "string"
},
"isKmTruckRate": {
"type": "number"
},
"fuelTypeFk": {
"type": "number"
},
"import": {
"type": "number"
},
"importCooler": {
"type": "number"
},
"vin": {
"type": "string"
},
"ppeFk": {
"type": "number"
},
"vehicleTypeFk": {
"type": "number"
},
"deliveryPointFk": {
"type": "number"
}
},
"relations": {
@ -46,21 +79,57 @@
"type": "belongsTo",
"model": "DeliveryPoint",
"foreignKey": "deliveryPointFk"
},
"event": {
"type": "hasMany",
"model": "VehicleEvent",
"foreignKey": "vehicleFk",
"property": "id"
},
"supplier": {
"type": "belongsTo",
"model": "Supplier",
"foreignKey": "supplierFk"
},
"supplierCooler": {
"type": "belongsTo",
"model": "Supplier",
"foreignKey": "supplierCoolerFk"
},
"bankPolicy": {
"type": "belongsTo",
"model": "BankPolicy",
"foreignKey": "bankPolicyFk"
},
"fuelType": {
"type": "belongsTo",
"model": "FuelType",
"foreignKey": "fuelTypeFk"
},
"ppe": {
"type": "hasOne",
"model": "Ppe",
"foreignKey": "id",
"property": "ppeFk"
},
"type": {
"type": "hasOne",
"model": "VehicleType",
"foreignKey": "id",
"property": "vehicleTypeFk"
}
},
"scope": {
"where": {
"isActive": {
"scopes": {
"active": {
"fields": [
"id",
"numberPlate"
],
"where": {
"isActive": {
"neq": false
}
}
},
"acls": [
{
"accessType": "READ",
"principalType": "ROLE",
"principalId": "$everyone",
"permission": "ALLOW"
}
]
}
}

View File

@ -7,6 +7,6 @@ describe('Supplier getItemsPackaging()', () => {
expect(item.id).toEqual(1);
expect(item.name).toEqual('Ranged weapon longbow 200cm');
expect(item.quantity).toEqual(5000);
expect(item.quantityTotal).toEqual(5100);
expect(item.quantityTotal).toEqual(5200);
});
});

View File

@ -49,7 +49,7 @@ module.exports = Self => {
ps.monitorId,
e.created
FROM expedition e
JOIN host h ON Convert(h.code USING utf8mb3) COLLATE utf8mb3_unicode_ci = e.hostFk
JOIN host h ON h.code = e.hostFk
JOIN packingSite ps ON ps.hostFk = h.id
WHERE e.id = ?;`;
const [expedition] = await models.Expedition.rawSql(query, [id]);

View File

@ -44,12 +44,14 @@ module.exports = Self => {
ps.monitorId,
e.created
FROM expedition e
JOIN host h ON Convert(h.code USING utf8mb3) COLLATE utf8mb3_unicode_ci = e.hostFk
JOIN packingSite ps ON ps.hostFk = h.id
JOIN host h ON h.code = e.hostFk
JOIN packingSite ps ON ps.hostFk = h.id
WHERE e.id = ?;`;
const [expedition] = await models.PackingSiteConfig.rawSql(query, [id]);
const [expedition] = await models.PackingSiteConfig.rawSql(query, [id], myOptions);
if (!from && !expedition) return [];
let start = new Date(expedition.created);
let end = new Date(start.getTime() + (packingSiteConfig.avgBoxingTime * 1000));
@ -57,9 +59,13 @@ module.exports = Self => {
start.setHours(from, 0, 0);
end.setHours(to, 0, 0);
}
const offset = start.getTimezoneOffset();
start = new Date(start.getTime() - (offset * 60 * 1000));
end = new Date(end.getTime() - (offset * 60 * 1000));
const minutes = start.getMinutes();
const roundedMinutes = minutes - (minutes % 15);
start.setMinutes(roundedMinutes, 0, 0);
const videoUrl =
`/${packingSiteConfig.shinobiToken}/videos/${packingSiteConfig.shinobiGroupKey}/${expedition.monitorId}`;
@ -73,6 +79,7 @@ module.exports = Self => {
} catch (e) {
return [];
}
return response.data.videos.map(video => video.filename);
};
};

View File

@ -2,35 +2,28 @@ const models = require('vn-loopback/server/server').models;
const axios = require('axios');
describe('boxing getVideoList()', () => {
it('should return video list', async() => {
const tx = await models.PackingSiteConfig.beginTransaction({});
let tx;
let options;
try {
const options = {transaction: tx};
beforeEach(async() => {
tx = await models.PackingSiteConfig.beginTransaction({});
options = {transaction: tx};
});
const id = 1;
const from = 1;
const to = 2;
afterEach(async() => {
await tx.rollback();
});
const response = {
data: {
videos: [{
id: 1,
filename: 'video1.mp4'
}]
}
};
it('should make the correct API call', async() => {
const expedition = await models.Expedition.findById(15, null, options);
await expedition.updateAttribute('created', '2000-12-01 07:07:00', options);
spyOn(axios, 'get').and.returnValue(new Promise(resolve => resolve(response)));
const axiosSpy = spyOn(axios, 'get').and.callThrough();
await models.Boxing.getVideoList(expedition.id, undefined, undefined, options);
const result = await models.Boxing.getVideoList(id, from, to, options);
const expectedStartTime = '2000-12-01T07:00:00';
const calledUrl = axiosSpy.calls.mostRecent().args[0];
expect(result[0]).toEqual(response.data.videos[0].filename);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
expect(calledUrl).toContain(`start=${expectedStartTime}`);
});
});

View File

@ -30,7 +30,6 @@ module.exports = Self => {
SELECT
s.id AS saleFk,
t.id AS ticketFk,
t.landed,
s.concept,
s.itemFk,
s.quantity,
@ -41,11 +40,10 @@ module.exports = Self => {
INNER JOIN vn.sale s ON s.ticketFk = t.id
LEFT JOIN vn.claimBeginning cb ON cb.saleFk = s.id
WHERE (t.landed) >= TIMESTAMPADD(DAY, -7, ?)
AND t.id = ? AND cb.id IS NULL
ORDER BY t.landed DESC, t.id DESC`;
WHERE t.id = ?
AND cb.id IS NULL`;
const claimableSales = await Self.rawSql(query, [date, ticketFk], myOptions);
const claimableSales = await Self.rawSql(query, [ticketFk], myOptions);
return claimableSales;
};

View File

@ -0,0 +1,99 @@
module.exports = Self => {
Self.remoteMethodCtx('replaceItem', {
description: 'Replace item from sale',
accessType: 'WRITE',
accepts: [
{
arg: 'saleFk',
type: 'number',
required: true,
},
{
arg: 'substitutionFk',
type: 'number',
required: true
},
{
arg: 'quantity',
type: 'number',
required: true
}
],
returns: {
type: 'object',
root: true
},
http: {
path: `/replaceItem`,
verb: 'POST'
}
});
Self.replaceItem = async(ctx, saleFk, substitutionFk, quantity, options) => {
const myOptions = {userId: ctx.req.accessToken.userId};
let tx;
const $t = ctx.req.__;
const models = Self.app.models;
if (typeof options == 'object')
Object.assign(myOptions, options);
if (!myOptions.transaction) {
tx = await Self.beginTransaction({});
myOptions.transaction = tx;
}
try {
const replaceItemQuery = {
sql: 'CALL sale_replaceItem(?,?,?)',
query: [saleFk, substitutionFk, quantity]
};
const resultReplaceItem = await Self.rawSql(replaceItemQuery.sql, replaceItemQuery.query, myOptions);
const sale = await models.Sale.findById(saleFk, {
fields: ['id', 'ticketFk', 'itemFk', 'quantity', 'price'],
include: [
{
relation: 'ticket',
scope: {
fields: ['id']
},
}, {
relation: 'item',
scope: {
fields: ['id', 'name', 'longName']
}
}
]
}, myOptions);
const salesPersonQuery = {
sql: 'SELECT vn.client_getSalesPersonByTicket(?)',
query: [sale.ticketFk]
};
const salesPerson = await Self.rawSql(salesPersonQuery.sql, salesPersonQuery.query, myOptions);
const url = await models.Url.getUrl();
const substitution = await models.Item.findById(substitutionFk, {
fields: ['id', 'name', 'longName']
}, myOptions);
const message = $t('negativeReplaced', {
oldItemId: sale.itemFk,
oldItem: sale.item().longName,
oldItemUrl: `${url}item/${sale.itemFk}/summary`,
newItemId: substitution.id,
newItem: substitution.longName,
newItemUrl: `${url}item/${substitution.id}/summary`,
ticketId: sale.ticketFk,
ticketUrl: `${url}ticket/${sale.ticketFk}/sale`
});
await models.Chat.sendCheckingPresence(ctx, salesPerson.id, message);
if (tx) await tx.commit();
return resultReplaceItem;
} catch (e) {
if (tx) await tx.rollback();
throw e;
}
};
};

View File

@ -0,0 +1,61 @@
const {models} = require('vn-loopback/server/server');
describe('Sale - replaceItem function', () => {
let options;
let tx;
const ctx = beforeAll.getCtx();
beforeAll.mockLoopBackContext();
beforeEach(async() => {
tx = await models.Sale.beginTransaction({});
options = {transaction: tx};
});
afterEach(async() => {
if (tx)
await tx.rollback();
});
it('should replace full item in sale and send notification', async() => {
const saleFk = 43;
const substitutionFk = 3;
const quantity = 15;
const ticketFk = 1000000;
const salesBefore = await models.Sale.find({where: {ticketFk}}, options);
const salesLength = salesBefore.length;
expect(1).toEqual(salesBefore.length);
await models.Sale.replaceItem(ctx, saleFk, substitutionFk, quantity, options);
const salesAfter = await models.Sale.find({where: {ticketFk}}, options);
expect(salesLength).toBeLessThan(salesAfter.length);
expect(salesAfter[0].id).toEqual(saleFk);
expect(salesAfter[salesLength].itemFk).toEqual(substitutionFk);
expect(salesAfter[salesLength].quantity).toEqual(quantity);
expect(salesAfter[0].quantity).toEqual(0);
expect(salesAfter[salesLength].concept).toMatch(/^\+/);
});
it('should replace half item in sale and send notification', async() => {
const saleFk = 43;
const substitutionFk = 3;
const quantity = 10;
const ticketFk = 1000000;
const salesBefore = await models.Sale.find({where: {ticketFk}}, options);
const salesLength = salesBefore.length;
expect(1).toEqual(salesBefore.length);
await models.Sale.replaceItem(ctx, saleFk, substitutionFk, quantity, options);
const salesAfter = await models.Sale.find({where: {ticketFk}}, options);
expect(salesLength).toBeLessThan(salesAfter.length);
expect(salesAfter[0].id).toEqual(saleFk);
expect(salesAfter[salesLength].itemFk).toEqual(substitutionFk);
expect(salesAfter[salesLength].quantity).toEqual(quantity);
expect(salesAfter[0].quantity).toEqual(5);
expect(salesAfter[salesLength].concept).toMatch(/^\+/);
});
});

View File

@ -113,6 +113,12 @@ module.exports = Self => {
const salesPerson = sale.ticket().client().salesPersonUser();
if (salesPerson) {
const url = await Self.app.models.Url.getUrl();
const saleCloned = await Self.app.models.SaleCloned.findById(sale.id, {
include: 'saleOriginal',
});
const ticketWeekly = saleCloned?.saleOriginal()?.ticketFk || null;
const message = $t('Changed sale price', {
ticketId: sale.ticket().id,
itemId: sale.itemFk,
@ -121,7 +127,8 @@ module.exports = Self => {
oldPrice: oldPrice,
newPrice: newPrice,
ticketUrl: `${url}ticket/${sale.ticket().id}/sale`,
itemUrl: `${url}item/${sale.itemFk}/summary`
itemUrl: `${url}item/${sale.itemFk}/summary`,
ticketWeekly: ticketWeekly ? $t('clonedFromTicketWeekly', {ticketWeekly}) : null
});
await models.Chat.sendCheckingPresence(ctx, salesPerson.id, message, myOptions);
}

View File

@ -72,6 +72,12 @@ module.exports = Self => {
const salesPerson = sale.ticket().client().salesPersonUser();
if (salesPerson) {
const url = await Self.app.models.Url.getUrl();
const saleCloned = await Self.app.models.SaleCloned.findById(sale.id, {
include: 'saleOriginal',
});
const ticketWeekly = saleCloned?.saleOriginal()?.ticketFk || null;
const change = $t('Changes in sales', {
itemId: sale.itemFk,
concept: sale.concept,
@ -84,6 +90,7 @@ module.exports = Self => {
ticketId: sale.ticket().id,
changes: JSON.stringify(change),
ticketUrl: `${url}ticket/${sale.ticket().id}/sale`,
ticketWeekly: ticketWeekly ? $t('clonedFromTicketWeekly', {ticketWeekly}) : null
});
await models.Chat.sendCheckingPresence(ctx, salesPerson.id, message, myOptions);

View File

@ -288,21 +288,17 @@ module.exports = Self => {
stmts.push(stmt);
stmt = new ParameterizedSQL(`
CREATE OR REPLACE TEMPORARY TABLE tmp.sale_getProblems
CREATE OR REPLACE TEMPORARY TABLE tmp.ticket
(INDEX (ticketFk))
ENGINE = MEMORY
SELECT f.id ticketFk, f.clientFk, f.warehouseFk, f.shipped
FROM tmp.filter f
LEFT JOIN alertLevel al ON al.id = f.alertLevel
WHERE (al.code = 'FREE' OR f.alertLevel IS NULL)
AND f.shipped >= ?
`, [date]);
SELECT f.id ticketFk
FROM tmp.filter f`);
stmts.push(stmt);
stmts.push('CALL ticket_getProblems(FALSE)');
stmt = new ParameterizedSQL(`
UPDATE tmp.ticket_problems
UPDATE tmp.ticketProblems
SET risk = IF(hasRisk, risk, 0)
`);
stmts.push(stmt);
@ -310,43 +306,19 @@ module.exports = Self => {
stmt = new ParameterizedSQL(`
SELECT f.*, tp.*
FROM tmp.filter f
LEFT JOIN tmp.ticket_problems tp ON tp.ticketFk = f.id
LEFT JOIN tmp.ticketProblems tp ON tp.ticketFk = f.id
`);
if (args.problems != undefined && (!args.from && !args.to))
throw new UserError('Choose a date range or days forward');
let condition;
let hasProblem;
let range;
let hasWhere;
switch (args.problems) {
case true:
condition = `or`;
hasProblem = true;
range = {neq: null};
hasWhere = true;
break;
case false:
condition = `and`;
hasProblem = null;
range = null;
hasWhere = true;
break;
if (typeof args.problems == 'boolean') {
let condition = 0;
if (args.problems)
condition = {neq: condition};
stmt.merge(conn.makeWhere({'tp.totalProblems': condition}));
}
const problems = {[condition]: [
{'tp.isFreezed': hasProblem},
{'tp.hasRisk': hasProblem},
{'tp.hasTicketRequest': hasProblem},
{'tp.itemShortage': range},
{'tp.hasRounding': hasProblem}
]};
if (hasWhere)
stmt.merge(conn.makeWhere(problems));
if (filter.order) {
if (typeof filter.order == 'string') filter.order = [filter.order];
const index = filter.order.findIndex(o => o.includes('stateFk'));
@ -365,8 +337,9 @@ module.exports = Self => {
stmts.push(
`DROP TEMPORARY TABLE
tmp.ticket,
tmp.filter,
tmp.ticket_problems`);
tmp.ticketProblems`);
const sql = ParameterizedSQL.join(stmts, ';');
const result = await conn.executeStmt(sql, myOptions);

View File

@ -98,14 +98,10 @@ module.exports = Self => {
for (let sale of sales) {
const problems = saleProblems.get(sale.id);
const itemStock = itemAvailable.get(sale.itemFk);
sale.available = itemStock.available;
sale.visible = itemStock.visible;
sale.claim = claimedSales.get(sale.id);
if (problems) {
sale.itemShortage = problems.itemShortage;
sale.hasTicketRequest = problems.hasTicketRequest;
sale.hasComponentLack = problems.hasComponentLack;
for (const problem in problems)
sale[problem] = problems[problem];
}
if (salesWithLogs.includes(sale.id))
sale.$hasLogs = true;

View File

@ -0,0 +1,83 @@
const {buildFilter} = require('vn-loopback/util/filter');
const ParameterizedSQL = require('loopback-connector').ParameterizedSQL;
module.exports = Self => {
Self.remoteMethodCtx('getTicketProblems', {
description: 'Get problems for a ticket',
accessType: 'READ',
accepts: [{
arg: 'id',
type: 'number',
required: true,
description: 'The ticket id',
http: {source: 'path'}
}],
returns: {
type: ['object'],
root: true
},
http: {
path: `/:id/getTicketProblems`,
verb: 'get'
}
});
Self.getTicketProblems = async(ctx, id, options) => {
const myOptions = {};
const stmts = [];
const conn = Self.dataSource.connector;
let stmt;
const ticketId = id;
const where = buildFilter(ctx.args, param => {
switch (param) {
case 'id':
return {'t.id': ticketId};
}
});
if (typeof options == 'object')
Object.assign(myOptions, options);
stmt = new ParameterizedSQL(`
CREATE OR REPLACE TEMPORARY TABLE tmp.filter
(INDEX (id))
ENGINE = MEMORY
SELECT t.id
FROM ticket t
`);
stmt.merge(conn.makeWhere(where));
stmts.push(stmt);
stmt = new ParameterizedSQL(`
CREATE OR REPLACE TEMPORARY TABLE tmp.ticket
(INDEX (ticketFk))
ENGINE = MEMORY
SELECT f.id AS ticketFk
FROM tmp.filter f
`);
stmts.push(stmt);
stmts.push('CALL ticket_getProblems(FALSE)');
stmt = new ParameterizedSQL(`
SELECT f.*, tp.*
FROM tmp.filter f
LEFT JOIN tmp.ticketProblems tp ON tp.ticketFk = f.id
`);
const ticketsIndex = stmts.push(stmt) - 1;
stmts.push(`
DROP TEMPORARY TABLE IF EXISTS
tmp.filter,
tmp.ticket,
tmp.ticketProblems
`);
const sql = ParameterizedSQL.join(stmts, ';');
const result = await conn.executeStmt(sql, myOptions);
return result[ticketsIndex];
};
};
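
A hedged example of consuming this endpoint, assuming the method is mounted on the Ticket model (REST base /api/Tickets) and an authenticated axios instance; the single returned row carries the problem flags computed by ticket_getProblems().

const axios = require('axios');

// Hypothetical helper: fetch the problem flags of one ticket.
async function getTicketProblems(ticketId, token) {
    const {data} = await axios.get(`/api/Tickets/${ticketId}/getTicketProblems`, {
        headers: {Authorization: token}
    });
    return data[0]; // e.g. {ticketFk, hasRisk, hasComponentLack, isTooLittle, ...}
}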

View File

@ -146,10 +146,10 @@ module.exports = Self => {
stmts.push(stmt);
stmt = new ParameterizedSQL(`
CREATE OR REPLACE TEMPORARY TABLE tmp.sale_getProblems
CREATE OR REPLACE TEMPORARY TABLE tmp.ticket
(INDEX (ticketFk))
ENGINE = MEMORY
SELECT f.id ticketFk, f.clientFk, f.warehouseFk, f.shipped, f.lines, f.liters
SELECT f.id ticketFk
FROM tmp.filter f
LEFT JOIN alertLevel al ON al.id = f.alertLevel
WHERE (al.code = 'FREE' OR f.alertLevel IS NULL)
@ -159,7 +159,7 @@ module.exports = Self => {
stmts.push('CALL ticket_getProblems(FALSE)');
stmt = new ParameterizedSQL(`
UPDATE tmp.ticket_problems
UPDATE tmp.ticketProblems
SET risk = IF(hasRisk, risk, 0)
`);
stmts.push(stmt);
@ -167,7 +167,7 @@ module.exports = Self => {
stmt = new ParameterizedSQL(`
SELECT f.*, tp.*
FROM tmp.filter f
LEFT JOIN tmp.ticket_problems tp ON tp.ticketFk = f.id
LEFT JOIN tmp.ticketProblems tp ON tp.ticketFk = f.id
`);
if (args.problems != undefined && (!args.originScopeDays && !args.futureScopeDays))
@ -175,20 +175,17 @@ module.exports = Self => {
let condition;
let hasProblem;
let range;
let hasWhere;
switch (args.problems) {
case true:
condition = `or`;
hasProblem = true;
range = {neq: null};
hasWhere = true;
break;
case false:
condition = `and`;
hasProblem = null;
range = null;
hasWhere = true;
break;
}
@ -198,7 +195,7 @@ module.exports = Self => {
{'tp.isFreezed': hasProblem},
{'tp.hasRisk': hasProblem},
{'tp.hasTicketRequest': hasProblem},
{'tp.itemShortage': range},
{'tp.hasItemShortage': hasProblem},
{'tp.hasComponentLack': hasProblem},
{'tp.isTooLittle': hasProblem},
{'tp.hasRounding': hasProblem}
@ -216,8 +213,9 @@ module.exports = Self => {
stmts.push(
`DROP TEMPORARY TABLE
tmp.ticket,
tmp.filter,
tmp.ticket_problems`);
tmp.ticketProblems`);
const sql = ParameterizedSQL.join(stmts, ';');
const result = await conn.executeStmt(sql, myOptions);

View File

@ -0,0 +1,111 @@
module.exports = Self => {
Self.remoteMethod('itemLack', {
description: 'Get tickets with negative (lack) status',
accessType: 'READ',
accepts: [
{
arg: 'ctx',
type: 'object',
http: {source: 'context'}
},
{
arg: 'filter',
type: 'object',
description: 'Filter defining where, order, offset, and limit - must be a JSON-encoded string',
http: {source: 'query'}
},
{
arg: 'id',
type: 'number',
description: 'The item id',
},
{
arg: 'longname',
type: 'string',
description: 'Article name',
},
{
arg: 'supplier',
type: 'string',
description: 'Supplier id',
},
{
arg: 'colour',
type: 'string',
description: 'The item\'s colour',
},
{
arg: 'size',
type: 'string',
description: 'The item\'s size',
},
{
arg: 'origen',
type: 'string',
description: 'The origin id',
},
{
arg: 'warehouseFk',
type: 'number',
description: 'The warehouse id',
},
{
arg: 'lack',
type: 'number',
description: 'The lack quantity',
},
{
arg: 'days',
type: 'number',
description: 'The range of days',
}
],
returns: [
{
arg: 'body',
type: ['object'],
root: true
}
],
http: {
path: `/itemLack`,
verb: 'GET'
}
});
Self.itemLack = async(ctx, filter, options) => {
const myOptions = {};
if (typeof options == 'object')
Object.assign(myOptions, options);
const filterKeyOrder = [
'id', 'force', 'days', 'longname', 'supplier',
'colour', 'size', 'originFk',
'lack', 'warehouseFk'
];
delete ctx?.args?.ctx;
delete ctx?.args?.filter;
filter = filter ?? {};
Object.assign(filter, ctx.args ?? {});
let procedureParams = [];
procedureParams.push(...filterKeyOrder.map(key => filter[key] ?? null));
// Default values
const forceIndex = filterKeyOrder.indexOf('force');
if (!procedureParams[forceIndex]) procedureParams[forceIndex] = true;
const daysIndex = filterKeyOrder.indexOf('days');
if (!procedureParams[daysIndex]) procedureParams[daysIndex] = 2;
const procedureArgs = Array(procedureParams.length).fill('?').join(', ');
let query = `CALL vn.item_getLack(${procedureArgs})`;
const result = await Self.rawSql(query, procedureParams, myOptions);
const itemsIndex = 0;
return result[itemsIndex];
};
};
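To make the positional mapping above easier to follow, here is a small sketch under an assumed filter; the values are illustrative, only the key order comes from filterKeyOrder.
// Assumed filter, e.g. as sent from the front end (values are illustrative):
const filterKeyOrder = [
    'id', 'force', 'days', 'longname', 'supplier',
    'colour', 'size', 'originFk', 'lack', 'warehouseFk'
];
const filter = {longname: 'Ranged weapon pistol 9mm', warehouseFk: 1};
const procedureParams = filterKeyOrder.map(key => filter[key] ?? null);
// -> [null, null, null, 'Ranged weapon pistol 9mm', null, null, null, null, null, 1]
// After the defaults (force -> TRUE, days -> 2) the call becomes:
// CALL vn.item_getLack(NULL, TRUE, 2, 'Ranged weapon pistol 9mm', NULL, NULL, NULL, NULL, NULL, 1)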

View File

@ -0,0 +1,167 @@
const {ParameterizedSQL} = require('loopback-connector');
module.exports = Self => {
Self.remoteMethod('itemLackDetail', {
description: 'Retrieve the detail of tickets with a negative (lack) item',
accessType: 'READ',
accepts: [
{
arg: 'itemFk',
type: 'number',
description: 'The id of the item with negative status',
},
{
arg: 'filter',
type: 'object',
description: 'Filter defining where, order, offset, and limit - must be a JSON-encoded string',
http: {source: 'query'}
}
],
returns: [
{
arg: 'body',
type: ['object'],
root: true,
},
],
http: {
path: `/itemLack/:itemFk`,
verb: 'GET',
},
});
Self.itemLackDetail = async(itemFk, filter, options) => {
const conn = Self.dataSource.connector;
const myOptions = {};
if (typeof options == 'object') Object.assign(myOptions, options);
const vDated = Date.vnNew();
vDated.setHours(0, 0, 0, 0);
const scopeDays = filter.where.scopeDays ?? 0;
let alertLevels = filter.where.alertLevelCode;
if (!alertLevels)
alertLevels = (await Self.app.models.AlertLevel.find({fields: ['code']})).map(({code}) => code);
const stmt = new ParameterizedSQL(`
SELECT s.id,
st.code,
t.id,
t.nickname,
c.id customerId,
t.shipped,
s.quantity,
ag.name,
ag.id agencyFk,
tls.alertLevel alertLevel,
st.name stateName,
s.id saleFk,
s.itemFk,
s.price price,
al.code alertLevelCode,
z.name zoneName,
z.id zoneFk,
z.hour theoreticalhour,
cn.isRookie,
sc.saleClonedFk turno,
tr.saleFk peticionCompra,
DATE_FORMAT(IF(HOUR(t.shipped), t.shipped, IF(zc.hour, zc.hour, z.hour)),'%H:%i') minTimed,
FALSE isBasket,
substitution.hasObservation,
(d.code = 'spainTeamVip') hasToIgnore
FROM sale s
LEFT JOIN saleGroupDetail sgd ON sgd.saleFk = s.id
JOIN ticket t ON t.id = s.ticketFk
LEFT JOIN zone z ON z.id = t.zoneFk
LEFT JOIN zoneClosure zc ON zc.zoneFk = t.zoneFk
AND t.shipped BETWEEN zc.dated AND util.dayEnd(t.shipped)
JOIN client c ON c.id=t.clientFk
LEFT JOIN bs.clientNewBorn cn ON cn.clientFk=c.id
JOIN agencyMode ag ON ag.id=t.agencyModeFk
JOIN ticketState tls ON tls.ticketFk=t.id
LEFT JOIN state st ON st.id=tls.state
LEFT JOIN alertLevel al ON al.id = st.alertLevel
LEFT JOIN saleCloned sc ON sc.saleClonedFk = s.id
LEFT JOIN ticketRequest tr ON tr.saleFk = s.id
LEFT JOIN workerDepartment wd ON wd.workerFk = c.salesPersonFk
LEFT JOIN department d ON d.id = wd.departmentFk
LEFT JOIN (
SELECT co.clientFk, COUNT(*) hasObservation
FROM clientObservation co
JOIN observationType ot ON ot.id = co.observationTypeFk
WHERE ot.code = 'substitution'
GROUP BY co.clientFk
) substitution ON substitution.clientFk = c.id
WHERE t.warehouseFk = ?
AND s.itemFk = ?
AND s.quantity <> 0
AND t.shipped BETWEEN ? AND (? + INTERVAL ? DAY)
AND sgd.saleFk IS NULL
AND (al.code IN (?) OR al.id IS NULL)
UNION ALL
SELECT r.id,
NULL,
r.orderFk,
c.name customerName,
c.id customerId,
r.shipment,
r.amount,
ag.name,
ag.id,
NULL,
NULL,
NULL,
r.itemFk,
NULL,
NULL,
NULL,
NULL,
NULL,
cn.isRookie,
NULL,
NULL,
NULL,
TRUE,
substitution.hasObservation,
d.code = 'spainTeamVip'
FROM hedera.orderRow r
JOIN hedera.order o ON o.id = r.orderFk
JOIN client c ON c.id = o.customer_id
JOIN agencyMode ag ON ag.id=o.agency_id
LEFT JOIN bs.clientNewBorn cn ON cn.clientFk=c.id
LEFT JOIN workerDepartment wd ON wd.workerFk = c.salesPersonFk
LEFT JOIN department d ON d.id = wd.departmentFk
LEFT JOIN (
SELECT co.clientFk, COUNT(*) hasObservation
FROM clientObservation co
JOIN observationType ot ON ot.id = co.observationTypeFk
WHERE ot.code = 'substitution'
GROUP BY co.clientFk
) substitution ON substitution.clientFk = c.id
WHERE r.shipment BETWEEN ? AND ? + INTERVAL ? DAY
AND r.created >= ?
AND r.warehouseFk = ?
AND NOT o.confirmed
AND r.itemFk = ?
AND r.amount
ORDER BY hasToIgnore, isBasket
`,
[
filter.where.warehouseFk,
itemFk,
vDated, vDated,
scopeDays,
alertLevels,
vDated, vDated, scopeDays, // r.shipment BETWEEN ? AND ? + INTERVAL ? DAY
vDated, // r.created >= ?
filter.where.warehouseFk,
itemFk
]);
const sql = ParameterizedSQL.join([stmt], ';');
const result = await conn.executeStmt(sql, myOptions);
return result;
};
};
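For reference, a sketch of the filter shape this method expects, inferred from the WHERE clauses above and the spec further down; the ids are illustrative.
const filter = {
    where: {
        warehouseFk: 60,          // used by both halves of the UNION
        scopeDays: 2,             // optional, defaults to 0
        alertLevelCode: ['FREE']  // optional, defaults to every AlertLevel code
    }
};
// itemFk 1167 is taken from the spec; options carries the transaction as usual.
const rows = await models.Ticket.itemLackDetail(1167, filter, options);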

View File

@ -42,11 +42,11 @@ describe('ticket filter()', () => {
const result = await models.Ticket.filter(ctx, filter, options);
const hasProblemTicket = result.some(ticket =>
ticket.isFreezed === true ||
ticket.hasRisk === true ||
ticket.hasTicketRequest === true ||
(typeof ticket.hasRounding === 'string' && ticket.hasRounding.trim().length > 0) ||
(typeof ticket.itemShortage === 'string' && ticket.itemShortage.trim().length > 0)
ticket.isFreezed == true ||
ticket.hasRisk == true ||
ticket.hasTicketRequest == true ||
ticket.hasRounding == true ||
ticket.hasItemShortage == true
);
expect(hasProblemTicket).toBe(true);
@ -80,11 +80,11 @@ describe('ticket filter()', () => {
const result = await models.Ticket.filter(ctx, filter, options);
result.forEach(ticket => {
expect(ticket.isFreezed).toEqual(null);
expect(ticket.hasRisk).toEqual(null);
expect(ticket.hasTicketRequest).toEqual(null);
expect(ticket.itemShortage).toEqual(null);
expect(ticket.hasRounding).toEqual(null);
expect(ticket.isFreezed).toEqual(0);
expect(ticket.hasRisk).toEqual(0);
expect(ticket.hasTicketRequest).toEqual(0);
expect(ticket.hasItemShortage).toEqual(0);
expect(ticket.hasRounding).toEqual(0);
});
await tx.rollback();

View File

@ -15,7 +15,6 @@ describe('ticket getSales()', () => {
expect(sales[1].item).toBeDefined();
expect(sales[2].item).toBeDefined();
expect(sales[3].item).toBeDefined();
expect(sales[0].claim).toBeDefined();
await tx.rollback();
} catch (e) {

View File

@ -0,0 +1,21 @@
const models = require('vn-loopback/server/server').models;
describe('ticket getTicketProblems()', () => {
const ctx = {req: {accessToken: 9}};
it('should return the problems of a ticket', async() => {
const tx = await models.Ticket.beginTransaction({});
try {
const options = {transaction: tx};
const problems = await models.Ticket.getTicketProblems(ctx, 11, options);
expect(problems[7].totalProblems).toEqual(3);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
});

View File

@ -0,0 +1,80 @@
const {models} = require('vn-loopback/server/server');
describe('Item Lack', () => {
let options;
let tx;
const ctx = beforeAll.getCtx();
beforeAll.mockLoopBackContext();
beforeEach(async() => {
tx = await models.Ticket.beginTransaction({});
options = {transaction: tx};
});
afterEach(async() => {
if (tx)
await tx.rollback();
});
it('should return data with NO filters', async() => {
const filter = {};
const result = await models.Ticket.itemLack(ctx, filter, options);
expect(result.length).toEqual(2);
});
it('should return data with filter.id', async() => {
const filter = {
id: 5
};
const result = await models.Ticket.itemLack(ctx, filter, options);
expect(result.length).toEqual(1);
});
it('should return data with filter.longname', async() => {
const filter = {
longname: 'Ranged weapon pistol 9mm'
};
const result = await models.Ticket.itemLack(ctx, filter, options);
expect(result.length).toEqual(1);
});
it('should return data with filter.colour', async() => {
const filter = {
colour: 'WHT'
};
const result = await models.Ticket.itemLack(ctx, filter, options);
expect(result.length).toEqual(1);
});
it('should return data with filter.originFk', async() => {
const filter = {
originFk: 1
};
const result = await models.Ticket.itemLack(ctx, filter, options);
expect(result.length).toEqual(2);
});
it('should return data with filter.size', async() => {
const filter = {
size: '15'
};
const result = await models.Ticket.itemLack(ctx, filter, options);
expect(result.length).toEqual(1);
});
it('should return data with filter.lack', async() => {
const filter = {
lack: '-15'
};
const result = await models.Ticket.itemLack(ctx, filter, options);
expect(result.length).toEqual(1);
});
});

View File

@ -0,0 +1,55 @@
const models = require('vn-loopback/server/server').models;
describe('Item Lack Detail', () => {
it('should return no rows if the id is null', async() => {
const tx = await models.Ticket.beginTransaction({});
try {
const options = {transaction: tx};
const itemFk = null;
const filter = {where: {warehouseFk: 60}};
const result = await models.Ticket.itemLackDetail(itemFk, filter, options);
expect(result.length).toEqual(0);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should return no rows when nothing matches the given item and warehouse', async() => {
const tx = await models.Ticket.beginTransaction({});
try {
const options = {transaction: tx};
const itemFk = 1167;
const filter = {where: {warehouseFk: 60}};
const result = await models.Ticket.itemLackDetail(itemFk, filter, options);
expect(result.length).toEqual(0);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should return no rows if the item does not exist', async() => {
const tx = await models.Ticket.beginTransaction({});
try {
const options = {transaction: tx};
const itemFk = 0;
const filter = {where: {warehouseFk: 60}};
const result = await models.Ticket.itemLackDetail(itemFk, filter, options);
expect(result.length).toEqual(0);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
});

View File

@ -0,0 +1,47 @@
const {models} = require('vn-loopback/server/server');
describe('Split', () => {
let options;
let tx;
const ctx = beforeAll.getCtx();
beforeAll.mockLoopBackContext();
beforeEach(async() => {
tx = await models.Ticket.beginTransaction({});
options = {transaction: tx};
});
afterEach(async() => {
if (tx)
await tx.rollback();
});
it('should split tickets with count 1', async() => {
const data = {ticketFk: 7, sales: [1]};
const result = await models.Ticket.split(ctx, data, options);
expect(result.ticket).toEqual(data.ticketFk);
expect(result.status).toEqual('noSplit');
});
it('should split tickets with count 2 and error', async() => {
const data = {ticketFk: 11, sales: [7]};
const result = await models.Ticket.split(ctx, data, options);
expect(result.ticket).toEqual(data.ticketFk);
expect(result.status).toEqual('error');
expect(result.message).toEqual('Can\'t transfer claimed sales');
});
it('should split tickets with count 2 and success', async() => {
const data = {ticketFk: 14, sales: [33]};
const result = await models.Ticket.split(ctx, data, options);
expect(result.ticket).toEqual(data.ticketFk);
expect(result.status).toEqual('split');
});
});

View File

@ -0,0 +1,73 @@
module.exports = Self => {
Self.remoteMethodCtx('split', {
description: 'Split ticket with custom date',
accessType: 'WRITE',
accepts: [
{
arg: 'ticket',
type: 'Object',
required: true,
http: {source: 'body'}
},
{
arg: 'date',
type: 'date',
required: true,
}
],
returns: {
type: ['Object'],
root: true
},
http: {
path: `/split`,
verb: 'POST'
}
});
Self.split = async(ctx, ticket, options) => {
const {ticketFk} = ticket;
const models = Self.app.models;
const myOptions = {};
let tx;
let result = [];
if (typeof options == 'object')
Object.assign(myOptions, options);
if (!myOptions.transaction) {
tx = await Self.beginTransaction({});
myOptions.transaction = tx;
}
try {
const count = await models.Sale.count({
ticketFk
}, myOptions);
if (count === 1)
return {ticket: ticketFk, status: 'noSplit'};
const [, [{vNewTicket}]] = await Self.rawSql(`
CALL vn.ticket_clone(?, @vNewTicket);
SELECT @vNewTicket vNewTicket;`,
[ticketFk], myOptions);
if (vNewTicket === 0) return result;
const sales = await models.Sale.find({
where: {id: {inq: ticket.sales}}
}, myOptions);
const updateIsPicked = sales.map(({id}) => Self.rawSql(`
UPDATE vn.sale SET isPicked = (id = ?) WHERE ticketFk = ?`,
[id, ticketFk], myOptions));
await Promise.all(updateIsPicked);
await Self.transferSales(ctx, ticketFk, vNewTicket, sales, myOptions);
await Self.rawSql(`CALL vn.ticket_setState(?, ?)`, [ticketFk, 'FIXING'], myOptions);
if (tx) await tx.commit();
return {ticket: ticketFk, newTicket: vNewTicket, status: 'split'};
} catch (e) {
if (tx) await tx.rollback();
return {ticket: ticketFk, status: 'error', message: e.message};
}
};
};
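A usage sketch for split, mirroring the spec above; the ids are fixture data there and the status values come straight from the method body.
// Direct model call, as in the spec:
const result = await models.Ticket.split(ctx, {ticketFk: 14, sales: [33]}, options);
// result.status is 'noSplit' (the ticket has a single sale), 'split' (clone and
// transfer succeeded, result.newTicket holds the new ticket id) or 'error'
// (result.message carries the reason, e.g. "Can't transfer claimed sales").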

View File

@ -166,10 +166,18 @@ module.exports = Self => {
const salesPerson = ticket.client().salesPersonUser();
if (salesPerson) {
const url = await Self.app.models.Url.getUrl();
const saleId = sales[0].id;
const saleCloned = await Self.app.models.SaleCloned.findById(saleId, {
include: 'saleOriginal',
});
const ticketWeekly = saleCloned?.saleOriginal()?.ticketFk || null;
const message = $t('Changed sale discount', {
ticketId: id,
ticketUrl: `${url}ticket/${id}/sale`,
changes: changesMade
changes: changesMade,
ticketWeekly: ticketWeekly ? $t('clonedFromTicketWeekly', {ticketWeekly}) : null
});
await models.Chat.sendCheckingPresence(ctx, salesPerson.id, message, myOptions);
}

View File

@ -13,6 +13,7 @@ module.exports = Self => {
require('../methods/sale/usesMana')(Self);
require('../methods/sale/clone')(Self);
require('../methods/sale/getFromSectorCollection')(Self);
require('../methods/sale/replaceItem')(Self);
Self.validatesPresenceOf('concept', {
message: `Concept cannot be blank`

View File

@ -26,6 +26,12 @@
},
"defaultAttenderFk": {
"type": "number"
},
"lackAlertPrice": {
"type": "number"
},
"lackScopeDays": {
"type": "number"
}
},
"relations": {

View File

@ -46,4 +46,8 @@ module.exports = function(Self) {
require('../methods/ticket/docuwareDownload')(Self);
require('../methods/ticket/myLastModified')(Self);
require('../methods/ticket/setWeight')(Self);
require('../methods/ticket/itemLack')(Self);
require('../methods/ticket/itemLackDetail')(Self);
require('../methods/ticket/split')(Self);
require('../methods/ticket/getTicketProblems')(Self);
};

View File

@ -92,7 +92,7 @@
</td>
<td class="icon-field">
<vn-icon
ng-show="::ticket.isTaxDataChecked === 0"
ng-show="::ticket.isTaxDataChecked !== 0"
translate-attr="{title: 'No verified data'}"
class="bright"
icon="icon-no036">

View File

@ -40,7 +40,7 @@
</vn-td>
<vn-td class="icon-field">
<vn-icon
ng-show="::ticket.isTaxDataChecked === 0"
ng-show="::ticket.isTaxDataChecked !== 0"
translate-attr="{title: 'No verified data'}"
class="bright"
icon="icon-no036">

View File

@ -67,6 +67,11 @@ module.exports = Self => {
type: 'number',
description: 'The freighter supplier id'
},
{
arg: 'entrySupplierFk',
type: 'number',
description: 'The entry supplier id (not the freighter)'
},
],
returns: {
type: ['Object'],
@ -94,6 +99,8 @@ module.exports = Self => {
return {'t.landed': {lte: value}};
case 'continent':
return {'cnt.code': value};
case 'entrySupplierFk':
return {'e.supplierFk': value};
case 'id':
case 'agencyModeFk':
case 'warehouseOutFk':

Some files were not shown because too many files have changed in this diff.