Merge branch 'dev' into 6533_myLogger_model

Javier Segarra 2024-04-24 12:28:19 +02:00
commit 940aa82cb2
3463 changed files with 139342 additions and 82035 deletions


@ -1,4 +1,6 @@
 node_modules
 print/node_modules
-front/node_modules
-services
+front
+db
+e2e
+storage

.gitignore vendored

@ -10,3 +10,4 @@ print.*.json
 db.json
 junit.xml
 .DS_Store
+storage

.husky/addReferenceTag.js Normal file

@ -0,0 +1,33 @@
const fs = require('fs');
const path = require('path');
function getCurrentBranchName(p = process.cwd()) {
if (!fs.existsSync(p)) return false;
const gitHeadPath = path.join(p, '.git', 'HEAD');
if (!fs.existsSync(gitHeadPath))
return getCurrentBranchName(path.resolve(p, '..'));
const headContent = fs.readFileSync(gitHeadPath, 'utf-8');
return headContent.trim().split('/')[2];
}
const branchName = getCurrentBranchName();
if (branchName) {
const msgPath = `.git/COMMIT_EDITMSG`;
const msg = fs.readFileSync(msgPath, 'utf-8');
const reference = branchName.match(/^\d+/);
const referenceTag = `refs #${reference}`;
if (!msg.includes(referenceTag) && reference) {
const splitedMsg = msg.split(':');
if (splitedMsg.length > 1) {
const finalMsg = splitedMsg[0] + ': ' + referenceTag + splitedMsg.slice(1).join(':');
fs.writeFileSync(msgPath, finalMsg);
}
}
}

.husky/commit-msg Executable file

@ -0,0 +1,8 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
echo "Running husky commit-msg hook"
npx --no-install commitlint --edit
echo "Adding reference tag to commit message"
node .husky/addReferenceTag.js


@ -3,7 +3,7 @@
 // Carácter predeterminado de final de línea.
 "files.eol": "\n",
 "editor.codeActionsOnSave": {
-"source.fixAll.eslint": true
+"source.fixAll.eslint": "explicit"
 },
 "search.useIgnoreFiles": false,
 "editor.defaultFormatter": "dbaeumer.vscode-eslint",
@ -11,8 +11,12 @@
"[javascript]": { "[javascript]": {
"editor.defaultFormatter": "dbaeumer.vscode-eslint" "editor.defaultFormatter": "dbaeumer.vscode-eslint"
}, },
"[json]": {
"editor.defaultFormatter": "vscode.json-language-features"
},
"cSpell.words": [ "cSpell.words": [
"salix", "salix",
"fdescribe" "fdescribe",
"Loggable"
] ]
} }


@ -5,274 +5,351 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-## [2350.01] - 2023-12-14
+## [24.20.01] - 2024-05-14
+## [24.18.01] - 2024-05-07
+## [24.16.01] - 2024-04-18
+## [2414.01] - 2024-04-04
+## [2408.01] - 2024-02-22
+## [2406.01] - 2024-02-08
 ### Added
 ### Changed
 ### Fixed
+## [2404.01] - 2024-01-25
+### Added
+### Changed
+### Fixed
+## [2402.01] - 2024-01-11
+### Added
+### Changed
+### Fixed
+## [2400.01] - 2024-01-04
+### Added
+### Changed
+### Fixed
+## [2350.01] - 2023-12-14
+### Características Añadidas 🆕
+- **Tickets → Expediciones:** Añadido soporte para Viaexpress
 ## [2348.01] - 2023-11-30
 ### Características Añadidas 🆕
 - **Tickets → Adelantar:** Permite mover lineas sin generar negativos
 - **Tickets → Adelantar:** Permite modificar la fecha de los tickets
 - **Trabajadores → Notificaciones:** Nueva sección (lilium)
 ### Correcciones 🛠️
 - **Tickets → RocketChat:** Arreglada detección de cambios
 ## [2346.01] - 2023-11-16
 ### Added
 ### Changed
 ### Fixed
 ## [2342.01] - 2023-11-02
 ### Added
 - (Usuarios -> Foto) Se muestra la foto del trabajador
 ### Fixed
 - (Usuarios -> Historial) Abre el descriptor del usuario correctamente
 ## [2340.01] - 2023-10-05
 ## [2338.01] - 2023-09-21
 ### Added
 - (Ticket -> Servicios) Se pueden abonar servicios
 - (Facturas -> Datos básicos) Muestra valores por defecto
 - (Facturas -> Borrado) Notificación al borrar un asiento ya enlazado en Sage
 ### Changed
 - (Trabajadores -> Calendario) Icono de check arreglado cuando pulsas un tipo de dia
 ## [2336.01] - 2023-09-07
 ## [2334.01] - 2023-08-24
 ### Added
 - (General -> Errores) Botón para enviar cau con los datos del error
 ## [2332.01] - 2023-08-10
 ### Added
 - (Trabajadores -> Gestión documental) Soporte para Docuware
 - (General -> Agencia) Soporte para Viaexpress
 - (Tickets -> SMS) Nueva sección en Lilium
 ### Changed
 - (General -> Tickets) Devuelve el motivo por el cual no es editable
 - (Desplegables -> Trabajadores) Mejorados
 - (General -> Clientes) Razón social y dirección en mayúsculas
 ### Fixed
 - (Clientes -> SMS) Al pasar el ratón por encima muestra el mensaje completo
 ## [2330.01] - 2023-07-27
 ### Added
 - (Artículos -> Vista Previa) Añadido campo "Plástico reciclado"
 - (Rutas -> Troncales) Nueva sección
 - (Tickets -> Opciones) Opción establecer peso
 - (Clientes -> SMS) Nueva sección
 ### Changed
 - (General -> Iconos) Añadidos nuevos iconos
 - (Clientes -> Razón social) Permite crear clientes con la misma razón social según el país
 ## [2328.01] - 2023-07-13
 ### Added
 - (Clientes -> Morosos) Añadida columna "es trabajador"
 - (Trabajadores -> Departamentos) Nueva sección
 - (Trabajadores -> Departamentos) Añadido listado de Trabajadores por departamento
 - (Trabajadores -> Departamentos) Añadido características de departamento e información
 ### Changed
 ### Fixed
 - (Trabajadores -> Departamentos) Arreglado búscador
 ## [2326.01] - 2023-06-29
 ### Added
 - (Entradas -> Correo) Al cambiar el tipo de cambio enviará un correo a las personas designadas
 - (General -> Históricos) Botón para ver el estado del registro en cada punto
 - (General -> Históricos) Al filtar por registro se muestra todo el histórial desde que fue creado
 - (Tickets -> Índice) Permite enviar varios albaranes a Docuware
 ### Changed
 - (General -> Históricos) Los registros se muestran agrupados por usuario y entidad
 - (Facturas -> Facturación global) Optimizada, generación de PDFs y notificaciones en paralelo
 ### Fixed
 - (General -> Históricos) Duplicidades eliminadas
 - (Facturas -> Facturación global) Solucionados fallos que paran el proceso
 ## [2324.01] - 2023-06-15
 ### Added
 - (Tickets -> Abono) Al abonar permite crear el ticket abono con almacén o sin almmacén
 - (General -> Desplegables) Mejorada eficiencia de carga de datos
 - (General -> Históricos) Ahora, ademas de los ids, se muestra la descripión de los atributos
 - (General -> Históricos) Botón para hacer más ágil mostrar sólo los cambios en un registro
 - (General -> Históricos) Filtro por cambios
 ### Changed
 - (General -> Permisos) Mejorada seguridad
 - (General -> Históricos) Elementos de la interfaz reorganizados para hacerla más ágil e intuitiva
 ### Fixed
 -
 ## [2322.01] - 2023-06-01
 ### Added
 - (Tickets -> Crear Factura) Al facturar se envia automáticamente el pdf al cliente
 - (Artículos -> Histórico) Filtro para mostrar lo anterior al inventario
 - (Trabajadores -> Nuevo trabajador) Permite elegir el método de pago
 ### Changed
 - (Trabajadores -> Nuevo trabajador) Los clientes se crean sin 'TR' pero se añade tipo de negocio 'Trabajador'
 - (Tickets -> Expediciones) Interfaz mejorada y contador añadido
 ### Fixed
 - (Tickets -> Líneas) Se permite hacer split de líneas al mismo ticket
 - (Tickets -> Cambiar estado) Ahora muestra la lista completa de todos los estados
 ## [2320.01] - 2023-05-25
 ### Added
 - (Tickets -> Crear Factura) Al facturar se envia automáticamente el pdf al cliente
 ### Changed
 - (Trabajadores -> Nuevo trabajador) Los clientes se crean sin 'TR' pero se añade tipo de negocio 'Trabajador'
 ### Fixed
 -
 ## [2318.01] - 2023-05-08
 ### Added
 - (Usuarios -> Histórico) Nueva sección
 - (Roles -> Histórico) Nueva sección
 - (Trabajadores -> Dar de alta) Permite elegir el método de pago
 ### Changed
 - (Artículo -> Precio fijado) Modificado el buscador superior por uno lateral
 - (Trabajadores -> Dar de alta) Quitada obligatoriedad del iban
 ### Fixed
 - (Ticket -> Boxing) Arreglado selección de horas
 - (Cesta -> Índice) Optimizada búsqueda
 ## [2314.01] - 2023-04-20
 ### Added
 - (Clientes -> Morosos) Ahora se puede filtrar por las columnas "Desde" y "Fecha Ú. O.". También se envia un email al comercial cuando se añade una nota.
 - (Monitor tickets) Muestra un icono al lado de la zona, si el ticket es frágil y se envía por agencia
 - (Facturas recibidas -> Bases negativas) Nueva sección
 ### Fixed
 - (Clientes -> Morosos) Ahora se mantienen los elementos seleccionados al hacer sroll.
 ## [2312.01] - 2023-04-06
 ### Added
 - (Monitor tickets) Muestra un icono al lado de la zona, si el ticket es frágil y se envía por agencia
 ### Changed
 - (Monitor tickets) Cuando se filtra por 'Pendiente' ya no muestra los estados de 'Previa'
 - (Envíos -> Extra comunitarios) Se agrupan las entradas del mismo travel. Añadidos campos Referencia y Importe.
 - (Envíos -> Índice) Cambiado el buscador superior por uno lateral
 ## [2310.01] - 2023-03-23
 ### Added
 - (Trabajadores -> Control de horario) Ahora se puede confirmar/no confirmar el registro horario de cada semana desde esta sección
 ### Fixed
 - (Clientes -> Listado extendido) Resuelto error al filtrar por clientes inactivos desde la columna "Activo"
 - (General) Al pasar el ratón por encima del icono de "Borrar" en un campo, se hacía más grande afectando a la interfaz
 ## [2308.01] - 2023-03-09
 ### Added
 - (Proveedores -> Datos fiscales) Añadido checkbox 'Vies'
 - (Client -> Descriptor) Nuevo icono $ con barrotes para los clientes con impago
 - (Trabajador -> Datos Básicos) Añadido nuevo campo Taquilla
 - (Trabajador -> PDA) Nueva sección
 ### Changed
 - (Ticket -> Borrar ticket) Restringido el borrado de tickets con abono
 ## [2306.01] - 2023-02-23
 ### Added
 - (Tickets -> Datos Básicos) Mensaje de confirmación al intentar generar tickets con negativos
 - (Artículos) El visible y disponible se calcula a partir de un almacén diferente dependiendo de la sección en la que te encuentres. Se ha añadido un icono que informa sobre a partir de que almacén se esta calculando.
 ### Changed
 - (General -> Inicio) Ahora permite recuperar la contraseña tanto con el correo de recuperación como el usuario
 ### Fixed
 - (Monitor de tickets) Cuando ordenas por columna, ya no se queda deshabilitado el botón de 'Actualizar'
 - (Zone -> Días de entrega) Al hacer click en un día, muestra correctamente las zonas
 - (Artículos) El disponible en la vista previa se muestra correctamente
 ## [2304.01] - 2023-02-09
 ### Added
 - (Rutas) Al descargar varias facturas se comprime en un zip
 - (Trabajadores -> Nuevo trabajador) Nueva sección
 - (Tickets -> Adelantar tickets) Añadidos campos "líneas" y "litros" al ticket origen
 - (Tickets -> Adelantar tickets) Nuevo icono muestra cuando las agencias de los tickets origen/destino son distintas
 ### Changed
 - (Entradas -> Compras) Cambiados los campos "Precio Grouping/Packing" por "PVP" y "Precio" por "Coste"
 - (Artículos -> Últimas entradas) Cambiados los campos "P.P.U." y "P.P.P." por "PVP"
 - (Rutas -> Sumario/Tickets) Actualizados campos de los tickets
 - (Proveedores -> Crear/Editar) Permite añadir Proveedores con la misma razón social pero con países distintos
 - (Tickets -> Adelantar tickets) Cambiados selectores de estado por checks "Pendiente origen/destino"
 - (Tickets -> Adelantar tickets) Cambiado stock de destino a origen.
 ### Fixed
 - (Artículos -> Etiquetas) Permite intercambiar la relevancia entre dos etiquetas.
 - (Cliente -> Datos Fiscales) No se permite seleccionar 'Notificar vía e-mail' a los clientes sin e-mail
 - (Tickets -> Datos básicos) Permite guardar la hora de envío
 - (Tickets -> Añadir pago) Eliminado "null" en las referencias
 - (Tickets -> Adelantar tickets) Permite ordenar por importe
 - (Tickets -> Adelantar tickets) El filtrado por encajado muestra también los tickets sin tipo de encajado
 ## [2302.01] - 2023-01-26
 ### Added
 - (General -> Inicio) Permite recuperar la contraseña
 - (Tickets -> Opciones) Subir albarán a Docuware
 - (Tickets -> Opciones) Enviar correo con PDF de Docuware
 - (Artículos -> Datos Básicos) Añadido campo Unidades/Caja
 ### Changed
 - (Reclamaciones -> Descriptor) Cambiado el campo Agencia por Zona
 - (Tickets -> Líneas preparadas) Actualizada sección para que sea más visual
 ### Fixed
 - (General) Al utilizar el traductor de Google se descuadraban los iconos
 ### Removed
 - (Tickets -> Control clientes) Eliminada sección

Jenkinsfile vendored

@ -1,144 +1,255 @@
 #!/usr/bin/env groovy
+def PROTECTED_BRANCH
+def FROM_GIT
+def RUN_TESTS
+def RUN_BUILD
+def BRANCH_ENV = [
+test: 'test',
+master: 'production'
+]
+node {
+stage('Setup') {
+env.BACK_REPLICAS = 1
+env.NODE_ENV = BRANCH_ENV[env.BRANCH_NAME] ?: 'dev'
+PROTECTED_BRANCH = [
+'dev',
+'test',
+'master'
+].contains(env.BRANCH_NAME)
+FROM_GIT = env.JOB_NAME.startsWith('gitea/')
+RUN_TESTS = !PROTECTED_BRANCH && FROM_GIT
+RUN_BUILD = PROTECTED_BRANCH && FROM_GIT
+// env.DEBUG = 'strong-remoting:shared-method'
+// https://www.jenkins.io/doc/book/pipeline/jenkinsfile/#using-environment-variables
+echo "NODE_NAME: ${env.NODE_NAME}"
+echo "WORKSPACE: ${env.WORKSPACE}"
+configFileProvider([
+configFile(fileId: 'salix.properties',
+variable: 'PROPS_FILE')
+]) {
+def props = readProperties file: PROPS_FILE
+props.each {key, value -> env."${key}" = value }
+props.each {key, value -> echo "${key}: ${value}" }
+}
+if (PROTECTED_BRANCH) {
+configFileProvider([
+configFile(fileId: "salix.branch.${env.BRANCH_NAME}",
+variable: 'BRANCH_PROPS_FILE')
+]) {
+def props = readProperties file: BRANCH_PROPS_FILE
+props.each {key, value -> env."${key}" = value }
+props.each {key, value -> echo "${key}: ${value}" }
+}
+}
+}
+}
 pipeline {
 agent any
 options {
 disableConcurrentBuilds()
 }
+tools {
+nodejs 'node-v20'
+}
 environment {
 PROJECT_NAME = 'salix'
 STACK_NAME = "${env.PROJECT_NAME}-${env.BRANCH_NAME}"
 }
 stages {
-stage('Install') {
-environment {
-NODE_ENV = ""
-}
-steps {
-nodejs('node-v20') {
-sh 'npm install --no-audit --prefer-offline'
-sh 'gulp install --ci'
-}
-}
-}
-stage('Test') {
-when { not { anyOf {
-branch 'test'
-branch 'master'
-}}}
-environment {
-NODE_ENV = ""
-TZ = 'Europe/Madrid'
-}
-parallel {
-stage('Frontend') {
-steps {
-nodejs('node-v20') {
-sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=2'
-}
-}
-}
-stage('Backend') {
-steps {
-nodejs('node-v20') {
-sh 'npm run test:back:ci'
-}
-}
-}
-}
-}
-stage('Build') {
-when { anyOf {
-branch 'test'
-branch 'master'
-}}
-environment {
-CREDENTIALS = credentials('docker-registry')
-}
-steps {
-nodejs('node-v20') {
-sh 'gulp build'
-}
-dockerBuild()
-}
-}
-stage('Deploy') {
-when { anyOf {
-branch 'test'
-branch 'master'
-}}
-environment {
-DOCKER_HOST = "${env.SWARM_HOST}"
-}
-steps {
-sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
-}
-}
-stage('Database') {
-when { anyOf {
-branch 'test'
-branch 'master'
-}}
-steps {
-configFileProvider([
-configFile(fileId: "config.${env.NODE_ENV}.ini",
-variable: 'MYSQL_CONFIG')
-]) {
-sh 'cp "$MYSQL_CONFIG" db/config.$NODE_ENV.ini'
-}
-sh 'db/import-changes.sh -f $NODE_ENV'
-}
-}
+stage('Checkout') {
+steps {
+script {
+switch (env.BRANCH_NAME) {
+case 'master':
+env.NODE_ENV = 'production'
+env.BACK_REPLICAS = 4
+break
+case 'test':
+env.NODE_ENV = 'test'
+env.BACK_REPLICAS = 2
+break
+}
+}
+configFileProvider([
+configFile(fileId: "salix.groovy",
+variable: 'GROOVY_FILE')
+]) {
+load env.GROOVY_FILE
+}
+setEnv()
+}
+}
+stage('Install') {
+environment {
+NODE_ENV = ''
+}
+parallel {
+stage('Back') {
+steps {
+sh 'pnpm install --prefer-offline'
+sh 'node node_modules/puppeteer/install.mjs'
+}
+}
+stage('Print') {
+when {
+expression { FROM_GIT }
+}
+steps {
+sh 'pnpm install --prefer-offline --prefix=print'
+}
+}
+stage('Front') {
+when {
+expression { FROM_GIT }
+}
+steps {
+sh 'pnpm install --prefer-offline --prefix=front'
+}
+}
+}
+}
+stage('Stack') {
+parallel {
+stage('Back') {
+stages {
+stage('Test') {
+when {
+expression { RUN_TESTS }
+}
+environment {
+NODE_ENV = ''
+}
+steps {
+sh 'node back/tests.js --ci --junit --network jenkins'
+}
+post {
+always {
+junit(
+testResults: 'junitresults.xml',
+allowEmptyResults: true
+)
+}
+}
+}
+stage('Build') {
+when {
+expression { RUN_BUILD }
+}
+steps {
+script {
+def packageJson = readJSON file: 'package.json'
+env.VERSION = packageJson.version
+}
+sh 'docker-compose build back'
+}
+}
+}
+}
+stage('Front') {
+when {
+expression { FROM_GIT }
+}
+stages {
+stage('Test') {
+when {
+expression { RUN_TESTS }
+}
+environment {
+NODE_ENV = ''
+}
+steps {
+sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=10'
+}
+post {
+always {
+junit(
+testResults: 'junit.xml',
+allowEmptyResults: true
+)
+}
+}
+}
+stage('Build') {
+when {
+expression { RUN_BUILD }
+}
+steps {
+script {
+def packageJson = readJSON file: 'package.json'
+env.VERSION = packageJson.version
+}
+sh 'gulp build'
+sh 'docker-compose build front'
+}
+}
+}
+}
+}
+}
+stage('Push') {
+when {
+expression { RUN_BUILD }
+}
+environment {
+CREDENTIALS = credentials('docker-registry')
+}
+steps {
+script {
+def packageJson = readJSON file: 'package.json'
+env.VERSION = packageJson.version
+}
+sh 'docker login --username $CREDENTIALS_USR --password $CREDENTIALS_PSW $REGISTRY'
+sh 'docker-compose push'
+}
+}
+stage('Deploy') {
+when {
+expression { PROTECTED_BRANCH }
+}
+parallel {
+stage('Database') {
+steps {
+configFileProvider([
+configFile(fileId: "config.${env.NODE_ENV}.ini",
+variable: 'MYSQL_CONFIG')
+]) {
+sh 'mkdir -p db/remotes'
+sh 'cp "$MYSQL_CONFIG" db/remotes/$NODE_ENV.ini'
+}
+sh 'npx myt push $NODE_ENV --force --commit'
+}
+}
+stage('Docker') {
+when {
+expression { FROM_GIT }
+}
+environment {
+DOCKER_HOST = "${env.SWARM_HOST}"
+}
+steps {
+script {
+def packageJson = readJSON file: 'package.json'
+env.VERSION = packageJson.version
+}
+sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
+}
+}
+}
+}
 }
 post {
-always {
-script {
-if (!['master', 'test'].contains(env.BRANCH_NAME)) {
-try {
-junit 'junitresults.xml'
-junit 'junit.xml'
-} catch (e) {
-echo e.toString()
-}
-}
-if (!env.COMMITTER_EMAIL || currentBuild.currentResult == 'SUCCESS') return;
-try {
-mail(
-to: env.COMMITTER_EMAIL,
-subject: "Pipeline: ${env.JOB_NAME} (${env.BUILD_NUMBER}): ${currentBuild.currentResult}",
-body: "Check status at ${env.BUILD_URL}"
-)
-} catch (e) {
-echo e.toString()
-}
-}
-}
+success {
+script {
+if (env.BRANCH_NAME == 'master' && FROM_GIT) {
+env.GIT_COMMIT_MSG = sh(
+script: 'git log -1 --pretty=%B ${GIT_COMMIT}',
+returnStdout: true
+).trim()
+String message = env.GIT_COMMIT_MSG
+int index = message.indexOf('\n')
+if (index != -1)
+message = message.substring(0, index)
+setEnv()
+rocketSend(
+channel: 'vn-database',
+message: "*DB version uploaded:* ${message}"
++"\n$COMMITTER_EMAIL ($BRANCH_NAME)"
++"\n$RUN_DISPLAY_URL",
+rawMessage: true
+)
+}
+}
+}
+unsuccessful {
+setEnv()
+sendEmail()
+}
 }
 }


@ -13,7 +13,7 @@ RUN apt-get update \
 graphicsmagick \
 && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
 && apt-get install -y --no-install-recommends nodejs \
-&& npm install -g npm@9.6.6
+&& corepack enable pnpm
 # Puppeteer
@ -39,12 +39,12 @@ RUN apt-get update \
 WORKDIR /salix
-COPY print/package.json print/package-lock.json print/
+COPY print/package.json print/pnpm-lock.yaml print/
-RUN npm --prefix ./print install --omit=dev ./print
+RUN pnpm install --prod --prefix=print
-COPY package.json package-lock.json ./
+COPY package.json pnpm-lock.yaml ./
 COPY loopback/package.json loopback/
-RUN npm install --omit=dev
+RUN pnpm install --prod
 COPY loopback loopback
 COPY back back


@ -3,14 +3,14 @@ const {models} = require('vn-loopback/server/server');
 describe('Chat send()', () => {
 it('should return true as response', async() => {
 let ctx = {req: {accessToken: {userId: 1}}};
-let response = await models.Chat.send(ctx, '@salesPerson', 'I changed something');
+let response = await models.Chat.send(ctx, '@salesperson', 'I changed something');
 expect(response).toEqual(true);
 });
 it('should return false as response', async() => {
 let ctx = {req: {accessToken: {userId: 18}}};
-let response = await models.Chat.send(ctx, '@salesPerson', 'I changed something');
+let response = await models.Chat.send(ctx, '@salesperson', 'I changed something');
 expect(response).toEqual(false);
 });


@ -0,0 +1,32 @@
const UserError = require('vn-loopback/util/user-error');
module.exports = Self => {
Self.remoteMethodCtx('assign', {
description: 'Assign a collection',
accessType: 'WRITE',
http: {
path: `/assign`,
verb: 'POST'
},
returns: {
type: ['object'],
root: true
},
});
Self.assign = async(ctx, options) => {
const userId = ctx.req.accessToken.userId;
const myOptions = {userId};
if (typeof options == 'object')
Object.assign(myOptions, options);
const [, , [{collectionFk}]] =
await Self.rawSql('CALL vn.collection_assign(?, @vCollectionFk); SELECT @vCollectionFk collectionFk',
[userId], myOptions);
if (!collectionFk) throw new UserError('There are not picking tickets');
await Self.rawSql('CALL vn.collection_printSticker(?, NULL)', [collectionFk], myOptions);
return collectionFk;
};
};


@ -0,0 +1,158 @@
module.exports = Self => {
Self.remoteMethodCtx('getSales', {
description: 'Get sales from ticket or collection',
accessType: 'READ',
accepts: [
{
arg: 'collectionOrTicketFk',
type: 'number',
required: true
}, {
arg: 'print',
type: 'boolean',
required: true
}, {
arg: 'source',
type: 'string',
required: true
},
],
returns: {
type: 'Object',
root: true
},
http: {
path: `/getSales`,
verb: 'GET'
},
});
Self.getSales = async(ctx, collectionOrTicketFk, print, source, options) => {
const models = Self.app.models;
const userId = ctx.req.accessToken.userId;
const myOptions = {userId};
const $t = ctx.req.__;
if (typeof options == 'object')
Object.assign(myOptions, options);
const [{id}] = await Self.rawSql('SELECT vn.ticket_get(?) as id',
[collectionOrTicketFk],
myOptions);
const [tickets] = await Self.rawSql('CALL vn.collection_getTickets(?)', [id], myOptions);
if (source) {
await Self.rawSql(
'CALL vn.ticketStateToday_setState(?,?)', [id, source], myOptions
);
}
const [sales] = await Self.rawSql('CALL vn.sale_getFromTicketOrCollection(?)',
[id], myOptions);
const isPicker = source != 'CHECKER';
const [placements] = await Self.rawSql('CALL vn.collectionPlacement_get(?, ?)',
[id, isPicker], myOptions
);
if (print) await Self.rawSql('CALL vn.collection_printSticker(?,NULL)', [id], myOptions);
for (let ticket of tickets) {
let observations = ticket.observaciones.split(' ');
for (let observation of observations) {
const salesPerson = ticket.salesPersonFk;
if (observation.startsWith('#') || observation.startsWith('@')) {
await models.Chat.send(ctx,
observation,
$t('ticketCommercial', {ticket: ticket.ticketFk, salesPerson})
);
}
}
}
return getCollection(id, tickets, sales, placements, myOptions);
};
async function getCollection(id, tickets, sales, placements, options) {
const collection = {
collectionFk: id,
tickets: [],
};
for (let ticket of tickets) {
const {ticketFk} = ticket;
ticket.sales = [];
const barcodes = await getBarcodes(ticketFk, options);
await Self.rawSql(
'CALL util.log_add(?, ?, ?, ?, ?, ?, ?, ?)',
['vn', 'ticket', 'Ticket', ticketFk, ticketFk, 'select', null, null],
options
);
for (let sale of sales) {
if (sale.ticketFk == ticketFk) {
sale.placements = [];
for (const salePlacement of placements) {
if (salePlacement.saleFk == sale.saleFk && salePlacement.order) {
const placement = {
saleFk: salePlacement.saleFk,
itemFk: salePlacement.itemFk,
placement: salePlacement.placement,
shelving: salePlacement.shelving,
created: salePlacement.created,
visible: salePlacement.visible,
order: salePlacement.order,
grouping: salePlacement.grouping,
priority: salePlacement.priority,
saleOrder: salePlacement.saleOrder,
isPreviousPrepared: salePlacement.isPreviousPrepared,
itemShelvingSaleFk: salePlacement.itemShelvingSaleFk,
ticketFk: salePlacement.ticketFk,
id: salePlacement.id
};
sale.placements.push(placement);
}
}
sale.barcodes = [];
for (const barcode of barcodes) {
if (barcode.movementId == sale.saleFk) {
if (barcode.code) {
sale.barcodes.push(barcode.code);
sale.barcodes.push(`0 ${barcode.code}`);
}
if (barcode.id) {
sale.barcodes.push(barcode.id);
sale.barcodes.push(`0 ${barcode.id}`);
}
}
}
ticket.sales.push(sale);
}
}
collection.tickets.push(ticket);
}
return collection;
}
async function getBarcodes(ticketId, options) {
const query =
`SELECT s.id movementId,
b.code,
c.id
FROM vn.sale s
LEFT JOIN vn.itemBarcode b ON b.itemFk = s.itemFk
LEFT JOIN vn.buy c ON c.itemFk = s.itemFk
LEFT JOIN vn.entry e ON e.id = c.entryFk
LEFT JOIN vn.travel tr ON tr.id = e.travelFk
WHERE s.ticketFk = ?
AND tr.landed >= DATE_SUB(CURDATE(), INTERVAL 1 YEAR)`;
return Self.rawSql(query, [ticketId], options);
}
};


@ -1,20 +0,0 @@
module.exports = Self => {
Self.remoteMethod('getSectors', {
description: 'Get all sectors',
accessType: 'READ',
returns: {
type: 'Object',
root: true
},
http: {
path: `/getSectors`,
verb: 'GET'
}
});
Self.getSectors = async() => {
const query = `CALL vn.sector_get()`;
const [result] = await Self.rawSql(query);
return result;
};
};


@ -0,0 +1,38 @@
const models = require('vn-loopback/server/server').models;
const LoopBackContext = require('loopback-context');
describe('ticket assign()', () => {
let ctx;
let options;
let tx;
beforeEach(async() => {
ctx = {
req: {
accessToken: {userId: 1106},
headers: {origin: 'http://localhost'},
__: value => value
},
args: {}
};
spyOn(LoopBackContext, 'getCurrentContext').and.returnValue({
active: ctx.req
});
options = {transaction: tx};
tx = await models.Sale.beginTransaction({});
options.transaction = tx;
});
afterEach(async() => {
await tx.rollback();
});
it('should throw an error when there is not picking tickets', async() => {
try {
await models.Collection.assign(ctx, options);
} catch (e) {
expect(e.message).toEqual('There are not picking tickets');
}
});
});


@ -0,0 +1,62 @@
const {models} = require('vn-loopback/server/server');
describe('collection getSales()', () => {
const collectionOrTicketFk = 999999;
const print = true;
const source = 'CHECKER';
beforeAll(() => {
ctx = {
req: {
accessToken: {userId: 9},
headers: {origin: 'http://localhost'},
}
};
});
it('should return a collection with tickets, placements and barcodes settled correctly', async() => {
const tx = await models.Collection.beginTransaction({});
const options = {transaction: tx};
try {
const collection = await models.Collection.getSales(ctx,
collectionOrTicketFk, print, source, options);
const [firstTicket] = collection.tickets;
const [firstSale] = firstTicket.sales;
const [firstPlacement] = firstSale.placements;
expect(collection.tickets.length).toBeTruthy();
expect(collection.collectionFk).toEqual(firstTicket.ticketFk);
expect(firstSale.ticketFk).toEqual(firstTicket.ticketFk);
expect(firstSale.placements.length).toBeTruthy();
expect(firstSale.barcodes.length).toBeTruthy();
expect(firstSale.saleFk).toEqual(firstPlacement.saleFk);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should print a sticker', async() => {
const tx = await models.Collection.beginTransaction({});
const options = {transaction: tx};
const query = 'SELECT * FROM printQueue pq JOIN printQueueArgs pqa ON pqa.printQueueFk = pq.id';
try {
const printQueueBefore = await models.Collection.rawSql(
query, [], options);
await models.Collection.getSales(ctx,
collectionOrTicketFk, true, source, options);
const printQueueAfter = await models.Collection.rawSql(
query, [], options);
expect(printQueueAfter.length).toEqual(printQueueBefore.length + 1);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
});


@ -1,11 +0,0 @@
const {models} = require('vn-loopback/server/server');
describe('getSectors()', () => {
it('return list of sectors', async() => {
let response = await models.Collection.getSectors();
expect(response.length).toBeGreaterThan(0);
expect(response[0].id).toEqual(1);
expect(response[0].description).toEqual('First sector');
});
});


@ -29,7 +29,8 @@ module.exports = Self => {
 http: {
 path: `/:id/downloadFile`,
 verb: 'GET'
-}
+},
+accessScopes: ['DEFAULT', 'read:multimedia']
 });
 Self.downloadFile = async function(ctx, id) {


@ -22,8 +22,8 @@ module.exports = Self => {
 Self.removeFile = async(ctx, id, options) => {
 const models = Self.app.models;
-let tx;
 const myOptions = {};
+let tx;
 if (typeof options == 'object')
 Object.assign(myOptions, options);


@ -49,7 +49,6 @@ module.exports = Self => {
 Self.uploadFile = async(ctx, options) => {
 const models = Self.app.models;
 const TempContainer = models.TempContainer;
-const DmsContainer = models.DmsContainer;
 const fileOptions = {};
 const args = ctx.args;
@ -79,19 +78,21 @@
 const addedDms = [];
 for (const uploadedFile of files) {
-const newDms = await createDms(ctx, uploadedFile, myOptions);
-const pathHash = DmsContainer.getHash(newDms.id);
 const file = await TempContainer.getFile(tempContainer.name, uploadedFile.name);
 srcFile = path.join(file.client.root, file.container, file.name);
-const dmsContainer = await DmsContainer.container(pathHash);
-const dstFile = path.join(dmsContainer.client.root, pathHash, newDms.file);
-await fs.move(srcFile, dstFile, {
-overwrite: true
-});
+const data = {
+workerFk: ctx.req.accessToken.userId,
+dmsTypeFk: args.dmsTypeId,
+companyFk: args.companyId,
+warehouseFk: args.warehouseId,
+reference: args.reference,
+description: args.description,
+contentType: uploadedFile.type,
+hasFile: args.hasFile
+};
+const extension = await models.DmsContainer.getFileExtension(uploadedFile.name);
+const newDms = await Self.createFromFile(data, extension, srcFile, myOptions);
 addedDms.push(newDms);
 }
@ -107,27 +108,4 @@ module.exports = Self => {
 throw e;
 }
 };
-async function createDms(ctx, file, myOptions) {
-const models = Self.app.models;
-const myUserId = ctx.req.accessToken.userId;
-const args = ctx.args;
-const newDms = await Self.create({
-workerFk: myUserId,
-dmsTypeFk: args.dmsTypeId,
-companyFk: args.companyId,
-warehouseFk: args.warehouseId,
-reference: args.reference,
-description: args.description,
-contentType: file.type,
-hasFile: args.hasFile
-}, myOptions);
-let fileName = file.name;
-const extension = models.DmsContainer.getFileExtension(fileName);
-fileName = `${newDms.id}.${extension}`;
-return newDms.updateAttribute('file', fileName, myOptions);
-}
 };


@ -42,7 +42,8 @@ module.exports = Self => {
 http: {
 path: `/:id/download`,
 verb: 'GET'
-}
+},
+accessScopes: ['DEFAULT', 'read:multimedia']
 });
 Self.download = async function(id, fileCabinet, filter) {


@ -24,15 +24,40 @@ describe('docuware upload()', () => {
 });
 it('should try upload file', async() => {
+const tx = await models.Docuware.beginTransaction({});
 spyOn(ticketModel, 'deliveryNotePdf').and.returnValue(new Promise(resolve => resolve({})));
 let error;
 try {
-await models.Docuware.upload(ctx, ticketIds, fileCabinetName);
+const options = {transaction: tx};
+const user = await models.UserConfig.findById(userId, null, options);
+await user.updateAttribute('tabletFk', 'Tablet1', options);
+await models.Docuware.upload(ctx, ticketIds, fileCabinetName, options);
+await tx.rollback();
 } catch (e) {
-error = e.message;
+error = e;
+await tx.rollback();
 }
-expect(error).toEqual('Action not allowed on the test environment');
+expect(error.message).toEqual('Action not allowed on the test environment');
 });
+it('should throw error when not have tablet assigned', async() => {
+const tx = await models.Docuware.beginTransaction({});
+spyOn(ticketModel, 'deliveryNotePdf').and.returnValue(new Promise(resolve => resolve({})));
+let error;
+try {
+const options = {transaction: tx};
+await models.Docuware.upload(ctx, ticketIds, fileCabinetName, options);
+await tx.rollback();
+} catch (e) {
+error = e;
+await tx.rollback();
+}
+expect(error.message).toEqual('This user does not have an assigned tablet');
+});
 });


@ -29,12 +29,24 @@ module.exports = Self => {
 }
 });
-Self.upload = async function(ctx, ticketIds, fileCabinet) {
+Self.upload = async function(ctx, ticketIds, fileCabinet, options) {
 delete ctx.args.ticketIds;
 const models = Self.app.models;
 const action = 'store';
-const options = await Self.getOptions();
+const myOptions = {};
+if (typeof options == 'object')
+Object.assign(myOptions, options);
+const userConfig = await models.UserConfig.findById(ctx.req.accessToken.userId, {
+fields: ['tabletFk']
+}, myOptions);
+if (!userConfig?.tabletFk)
+throw new UserError('This user does not have an assigned tablet');
+const docuwareOptions = await Self.getOptions();
 const fileCabinetId = await Self.getFileCabinet(fileCabinet);
 const dialogId = await Self.getDialog(fileCabinet, action, fileCabinetId);
@ -45,7 +57,7 @@ module.exports = Self => {
 const deliveryNote = await models.Ticket.deliveryNotePdf(ctx, {
 id,
 type: 'deliveryNote'
-});
+}, myOptions);
 // get ticket data
 const ticket = await models.Ticket.findById(id, {
 include: [{
@ -54,7 +66,7 @@ module.exports = Self => {
 fields: ['id', 'name', 'fi']
 }
 }]
-});
+}, myOptions);
 // upload file
 const templateJson = {
@ -102,7 +114,7 @@ module.exports = Self => {
 {
 'FieldName': 'FILTRO_TABLET',
 'ItemElementName': 'string',
-'Item': 'Tablet1',
+'Item': userConfig.tabletFk,
 }
 ]
 };
@ -116,11 +128,11 @@ module.exports = Self => {
 const deleteJson = {
 'Field': [{'FieldName': 'ESTADO', 'Item': 'Pendiente eliminar', 'ItemElementName': 'String'}]
 };
-const deleteUri = `${options.url}/FileCabinets/${fileCabinetId}/Documents/${docuwareFile.id}/Fields`;
+const deleteUri = `${docuwareOptions.url}/FileCabinets/${fileCabinetId}/Documents/${docuwareFile.id}/Fields`;
-await axios.put(deleteUri, deleteJson, options.headers);
+await axios.put(deleteUri, deleteJson, docuwareOptions.headers);
 }
-const uploadUri = `${options.url}/FileCabinets/${fileCabinetId}/Documents?StoreDialogId=${dialogId}`;
+const uploadUri = `${docuwareOptions.url}/FileCabinets/${fileCabinetId}/Documents?StoreDialogId=${dialogId}`;
 const FormData = require('form-data');
 const data = new FormData();
@ -130,7 +142,7 @@ module.exports = Self => {
 headers: {
 'Content-Type': 'multipart/form-data',
 'X-File-ModifiedDate': Date.vnNew(),
-'Cookie': options.headers.headers.Cookie,
+'Cookie': docuwareOptions.headers.headers.Cookie,
 ...data.getHeaders()
 },
 };
@ -141,11 +153,11 @@ module.exports = Self => {
 const $t = ctx.req.__;
 const message = $t('Failed to upload delivery note', {id});
 if (uploaded.length)
-await models.TicketTracking.setDelivered(ctx, uploaded);
+await models.TicketTracking.setDelivered(ctx, uploaded, myOptions);
 throw new UserError(message);
 }
 uploaded.push(id);
 }
-return models.TicketTracking.setDelivered(ctx, ticketIds);
+return models.TicketTracking.setDelivered(ctx, ticketIds, myOptions);
 };
 };


@ -47,7 +47,8 @@ module.exports = Self => {
 http: {
 path: `/:collection/:size/:id/download`,
 verb: 'GET'
-}
+},
+accessScopes: ['DEFAULT', 'read:multimedia']
 });
 Self.download = async function(ctx, collection, size, id) {
@ -87,6 +88,6 @@ module.exports = Self => {
 await fs.access(file.path);
 const stream = fs.createReadStream(file.path);
-return [stream, file.contentType, `filename="${file.name}"`];
+return [stream, file.contentType, `filename="${fileName}"`];
 };
 };


@ -4,20 +4,21 @@ describe('image download()', () => {
 const collection = 'user';
 const size = '160x160';
 const employeeId = 1;
+const developerId = 9;
+const jessicaJonesId = 1110;
 const ctx = {req: {accessToken: {userId: employeeId}}};
 it('should return the image content-type of the user', async() => {
-const userId = 9;
-const image = await models.Image.download(ctx, collection, size, userId);
+const image = await models.Image.download(ctx, collection, size, developerId);
 const contentType = image[1];
 expect(contentType).toEqual('image/png');
 });
-it(`should return false if the user doesn't have image`, async() => {
-const userId = 1110;
-const image = await models.Image.download(ctx, collection, size, userId);
-expect(image).toBeFalse();
+it('should return the user profile picture', async() => {
+const image = await models.Image.download(ctx, collection, size, jessicaJonesId);
+const fileName = image[2];
+expect(fileName).toMatch('1110.png');
 });
 });


@ -95,10 +95,7 @@ describe('image upload()', () => {
 spyOn(containerModel, 'upload');
 const ctx = {req: {accessToken: {userId: hhrrId}},
-args: {
-id: itemId,
-collection: 'user'
-}
+args: {id: itemId, collection: 'user'}
 };
 try {
@ -109,7 +106,7 @@
 });
 it('should try to upload a file for the collection "catalog" and throw a privilege error', async() => {
-const ctx = {req: {accessToken: {userId: hhrrId}},
+const ctx = {req: {accessToken: {userId: 1}},
 args: {
 id: workerId,
 collection: 'catalog'


@ -0,0 +1,132 @@
const {models} = require('vn-loopback/server/server');
describe('machineWorker updateInTime()', () => {
const itBoss = 104;
const davidCharles = 1106;
beforeAll(async() => {
ctx = {
req: {
accessToken: {},
headers: {origin: 'http://localhost'},
__: value => value
}
};
});
it('should throw an error if the plate does not exist', async() => {
const tx = await models.MachineWorker.beginTransaction({});
const options = {transaction: tx};
const plate = 'RE-123';
ctx.req.accessToken.userId = 1106;
try {
await models.MachineWorker.updateInTime(ctx, plate, options);
await tx.rollback();
} catch (e) {
const error = e;
expect(error.message).toContain('the plate does not exist');
await tx.rollback();
}
});
it('should grab a machine where is not in use', async() => {
const tx = await models.MachineWorker.beginTransaction({});
const options = {transaction: tx};
const plate = 'RE-003';
ctx.req.accessToken.userId = 1107;
try {
const totalBefore = await models.MachineWorker.find(null, options);
await models.MachineWorker.updateInTime(ctx, plate, options);
const totalAfter = await models.MachineWorker.find(null, options);
expect(totalAfter.length).toEqual(totalBefore.length + 1);
await tx.rollback();
} catch (e) {
await tx.rollback();
}
});
describe('less than 12h', () => {
const plate = 'RE-001';
it('should trow an error if it is not himself', async() => {
const tx = await models.MachineWorker.beginTransaction({});
const options = {transaction: tx};
ctx.req.accessToken.userId = davidCharles;
try {
await models.MachineWorker.updateInTime(ctx, plate, options);
await tx.rollback();
} catch (e) {
const error = e;
expect(error.message).toContain('This machine is already in use');
await tx.rollback();
}
});
it('should throw an error if it is himself with a different machine', async() => {
const tx = await models.MachineWorker.beginTransaction({});
const options = {transaction: tx};
ctx.req.accessToken.userId = itBoss;
const plate = 'RE-003';
try {
await models.MachineWorker.updateInTime(ctx, plate, options);
await tx.rollback();
} catch (e) {
const error = e;
expect(error.message).toEqual('You are already using a machine');
await tx.rollback();
}
});
it('should set the out time if it is himself', async() => {
const tx = await models.MachineWorker.beginTransaction({});
const options = {transaction: tx};
ctx.req.accessToken.userId = itBoss;
try {
const isNotParked = await models.MachineWorker.findOne({
where: {workerFk: itBoss}
}, options);
await models.MachineWorker.updateInTime(ctx, plate, options);
const isParked = await models.MachineWorker.findOne({
where: {workerFk: itBoss}
}, options);
expect(isNotParked.outTime).toBeNull();
expect(isParked.outTime).toBeDefined();
await tx.rollback();
} catch (e) {
await tx.rollback();
}
});
});
describe('equal or more than 12h', () => {
const plate = 'RE-002';
it('should set the out time and grab the machine', async() => {
const tx = await models.MachineWorker.beginTransaction({});
const options = {transaction: tx};
ctx.req.accessToken.userId = davidCharles;
const filter = {
where: {workerFk: davidCharles, machineFk: 2}
};
try {
const isNotParked = await models.MachineWorker.findOne(filter, options);
const totalBefore = await models.MachineWorker.find(null, options);
await models.MachineWorker.updateInTime(ctx, plate, options);
const isParked = await models.MachineWorker.findOne(filter, options);
const totalAfter = await models.MachineWorker.find(null, options);
expect(isNotParked.outTime).toBeNull();
expect(isParked.outTime).toBeDefined();
expect(totalAfter.length).toEqual(totalBefore.length + 1);
await tx.rollback();
} catch (e) {
await tx.rollback();
}
});
});
});


@ -0,0 +1,77 @@
const UserError = require('vn-loopback/util/user-error');
module.exports = Self => {
Self.remoteMethodCtx('updateInTime', {
description: 'Updates the corresponding registry if the worker has been registered in the last few hours',
accessType: 'WRITE',
accepts: [
{
arg: 'plate',
type: 'string',
}
],
http: {
path: `/updateInTime`,
verb: 'POST'
}
});
Self.updateInTime = async(ctx, plate, options) => {
const models = Self.app.models;
const userId = ctx.req.accessToken.userId;
const $t = ctx.req.__;
let tx;
const myOptions = {};
if (typeof options == 'object')
Object.assign(myOptions, options);
if (!myOptions.transaction) {
tx = await Self.beginTransaction({});
myOptions.transaction = tx;
}
try {
const machine = await models.Machine.findOne({
fields: ['id', 'plate'],
where: {plate}
}, myOptions);
if (!machine)
throw new UserError($t('the plate does not exist', {plate}));
const machineWorker = await Self.findOne({
where: {
or: [{machineFk: machine.id}, {workerFk: userId}],
outTime: null,
}
}, myOptions);
const {maxHours} = await models.MachineWorkerConfig.findOne({fields: ['maxHours']}, myOptions);
const hoursDifference = (Date.vnNow() - machineWorker?.inTime?.getTime() ?? 0) / (60 * 60 * 1000);
if (machineWorker) {
const isHimself = userId == machineWorker.workerFk;
const isSameMachine = machine.id == machineWorker.machineFk;
if (hoursDifference < maxHours && !isHimself)
throw new UserError($t('This machine is already in use.'));
if (hoursDifference < maxHours && isHimself && !isSameMachine)
throw new UserError($t('You are already using a machine'));
await machineWorker.updateAttributes({
outTime: Date.vnNew()
}, myOptions);
}
if (!machineWorker || hoursDifference >= maxHours)
await models.MachineWorker.create({machineFk: machine.id, workerFk: userId}, myOptions);
if (tx) await tx.commit();
} catch (e) {
if (tx) await tx.rollback();
throw e;
}
};
};


@ -0,0 +1,50 @@
module.exports = Self => {
Self.remoteMethodCtx('getVersion', {
description: 'gets app version data',
accessType: 'READ',
accepts: [{
arg: 'app',
type: 'string',
required: true
}],
returns: {
type: ['object'],
root: true
},
http: {
path: `/getVersion`,
verb: 'GET'
}
});
Self.getVersion = async(ctx, app) => {
const {models} = Self.app;
const userId = ctx.req.accessToken.userId;
const workerFk = await models.WorkerAppTester.findOne({
where: {
workerFk: userId
}
});
let fields = ['id', 'appName'];
if (workerFk)
fields = fields.concat(['isVersionBetaCritical', 'versionBeta', 'urlBeta']);
else
fields = fields.concat(['isVersionCritical', 'version', 'urlProduction']);
const filter = {
where: {
appName: app
},
fields,
};
const result = await Self.findOne(filter);
return {
isVersionCritical: result?.isVersionBetaCritical ?? result?.isVersionCritical,
version: result?.versionBeta ?? result?.version,
url: result?.urlBeta ?? result?.urlProduction
};
};
};


@ -0,0 +1,29 @@
const {models} = require('vn-loopback/server/server');
describe('mobileAppVersionControl getVersion()', () => {
const appName = 'delivery';
const appNameVersion = '9.2';
const appNameVersionBeta = '9.7';
beforeAll(async() => {
ctx = {
req: {
accessToken: {},
headers: {origin: 'http://localhost'},
}
};
});
it('should get the version app', async() => {
ctx.req.accessToken.userId = 9;
const {version} = await models.MobileAppVersionControl.getVersion(ctx, appName);
expect(version).toEqual(appNameVersion);
});
it('should get the beta version app', async() => {
ctx.req.accessToken.userId = 66;
const {version} = await models.MobileAppVersionControl.getVersion(ctx, appName);
expect(version).toEqual(appNameVersionBeta);
});
});


@ -0,0 +1,20 @@
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope" xmlns:mrw="http://www.mrw.es/">
<soap:Header>
<mrw:AuthInfo>
<mrw:CodigoFranquicia><%= mrw.franchiseCode %></mrw:CodigoFranquicia>
<mrw:CodigoAbonado><%= mrw.subscriberCode %></mrw:CodigoAbonado>
<mrw:CodigoDepartamento/>
<mrw:UserName><%= mrw.user %></mrw:UserName>
<mrw:Password><%= mrw.password %></mrw:Password>
</mrw:AuthInfo>
</soap:Header>
<soap:Body>
<mrw:CancelarEnvio>
<mrw:request>
<mrw:CancelaEnvio>
<mrw:NumeroEnvioOriginal><%= externalId %></mrw:NumeroEnvioOriginal>
</mrw:CancelaEnvio>
</mrw:request>
</mrw:CancelarEnvio>
</soap:Body>
</soap:Envelope>

View File

@ -0,0 +1,46 @@
const axios = require('axios');
const fs = require('fs');
const ejs = require('ejs');
const {DOMParser} = require('xmldom');
module.exports = Self => {
Self.remoteMethod('cancelShipment', {
description: 'Cancel a shipment by providing the expedition ID, interacting with MRW WebService',
accessType: 'WRITE',
accepts: [{
arg: 'expeditionFk',
type: 'number',
required: true
}],
returns: {
type: ['object'],
root: true
},
http: {
path: `/cancelShipment`,
verb: 'POST'
}
});
Self.cancelShipment = async expeditionFk => {
const models = Self.app.models;
const mrw = await models.MrwConfig.findOne();
const {externalId} = await models.Expedition.findById(expeditionFk);
const template = fs.readFileSync(__dirname + '/cancelShipment.ejs', 'utf-8');
const renderedXml = ejs.render(template, {mrw, externalId});
const response = await axios.post(mrw.url, renderedXml, {
headers: {
'Content-Type': 'application/soap+xml; charset=utf-8'
}
});
const xmlString = response.data;
const parser = new DOMParser();
const xmlDoc = parser.parseFromString(xmlString, 'text/xml');
const [resultElement] = xmlDoc.getElementsByTagName('Mensaje');
return resultElement.textContent;
};
};
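As a rough usage sketch (the wrapper is illustrative): the method loads MrwConfig, renders cancelShipment.ejs with the expedition's externalId, posts the SOAP envelope and returns the text of the <Mensaje> element.

const {models} = require('vn-loopback/server/server');

async function cancelMrwExpedition(expeditionFk) {
    // Resolves to MRW's status message for the cancellation request.
    const message = await models.MrwConfig.cancelShipment(expeditionFk);
    return message;
}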

View File

@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope" xmlns:mrw="http://www.mrw.es/">
<soap:Header>
<mrw:AuthInfo>
<mrw:CodigoFranquicia><%= mrw.franchiseCode %></mrw:CodigoFranquicia>
<mrw:CodigoAbonado><%= mrw.subscriberCode %></mrw:CodigoAbonado>
<mrw:CodigoDepartamento/>
<mrw:UserName><%= mrw.user %></mrw:UserName>
<mrw:Password><%= mrw.password %></mrw:Password>
</mrw:AuthInfo>
</soap:Header>
<soap:Body>
<mrw:TransmEnvio>
<mrw:request>
<mrw:DatosEntrega>
<mrw:Direccion>
<mrw:CodigoTipoVia/>
<mrw:Via><%= expeditionData.street %></mrw:Via>
<mrw:Numero/>
<mrw:Resto/>
<mrw:CodigoPostal><%= expeditionData.postalCode %></mrw:CodigoPostal>
<mrw:Poblacion><%= expeditionData.city %></mrw:Poblacion>
<mrw:Provincia/>
<mrw:CodigoPais/>
</mrw:Direccion>
<mrw:Nif><%= expeditionData.fi %></mrw:Nif>
<mrw:Nombre><%= expeditionData.clientName %></mrw:Nombre>
<mrw:Telefono><%= expeditionData.phone %></mrw:Telefono>
</mrw:DatosEntrega>
<mrw:DatosServicio>
<mrw:Fecha><%= expeditionData.created %></mrw:Fecha>
<mrw:Referencia><%= expeditionData.expeditionId %></mrw:Referencia>
<mrw:CodigoServicio><%= expeditionData.serviceType %></mrw:CodigoServicio>
<mrw:NumeroBultos>1</mrw:NumeroBultos>
<mrw:EntregaSabado><%= expeditionData.weekDays %></mrw:EntregaSabado>
<mrw:Peso><%= expeditionData.kg %></mrw:Peso>
<mrw:Reembolso/>
<mrw:ImporteReembolso/>
</mrw:DatosServicio>
</mrw:request>
</mrw:TransmEnvio>
</soap:Body>
</soap:Envelope>

View File

@ -0,0 +1,118 @@
const axios = require('axios');
const {DOMParser} = require('xmldom');
const fs = require('fs');
const ejs = require('ejs');
const UserError = require('vn-loopback/util/user-error');
module.exports = Self => {
Self.remoteMethod('createShipment', {
description: 'Create an expedition and return a base64Binary label from the MRW WebService',
accessType: 'WRITE',
accepts: [{
arg: 'expeditionFk',
type: 'number',
required: true
}],
returns: {
type: ['object'],
root: true
},
http: {
path: `/createShipment`,
verb: 'POST'
}
});
Self.createShipment = async(expeditionFk, options) => {
const myOptions = {};
let tx;
if (typeof options == 'object')
Object.assign(myOptions, options);
if (!myOptions.transaction) {
tx = await Self.beginTransaction({});
myOptions.transaction = tx;
}
const models = Self.app.models;
const mrw = await models.MrwConfig.findOne(null, myOptions);
if (!mrw)
throw new UserError(`Some mrwConfig parameters are not set`);
const query =
`SELECT CASE co.code
WHEN 'ES' THEN a.postalCode
WHEN 'PT' THEN LEFT(a.postalCode, 4)
WHEN 'AD' THEN REPLACE(a.postalCode, 'AD', '00')
END postalCode,
a.city,
a.street,
co.code countryCode,
c.fi,
c.name clientName,
c.phone,
DATE_FORMAT(t.shipped, '%d/%m/%Y') created,
t.shipped,
e.id expeditionId,
LPAD(IF(mw.params IS NULL, ms.serviceType, mw.serviceType), 4 ,'0') serviceType,
IF(mw.weekdays, 'S', 'N') weekDays
FROM expedition e
JOIN ticket t ON e.ticketFk = t.id
JOIN agencyMode am ON am.id = t.agencyModeFk
JOIN mrwService ms ON ms.agencyModeCodeFk = am.code
LEFT JOIN mrwServiceWeekday mw ON mw.weekdays = DATE_FORMAT(t.shipped, '%a')
JOIN client c ON t.clientFk = c.id
JOIN address a ON t.addressFk = a.id
JOIN province p ON a.provinceFk = p.id
JOIN country co ON co.id = p.countryFk
WHERE e.id = ?
LIMIT 1`;
const [expeditionData] = await Self.rawSql(query, [expeditionFk], myOptions);
if (!expeditionData)
throw new UserError(`This expedition is not a MRW shipment`);
const today = Date.vnNew();
today.setHours(0, 0, 0, 0);
if (expeditionData?.shipped.setHours(0, 0, 0, 0) < today)
throw new UserError(`This ticket has a shipped date earlier than today`);
const shipmentResponse = await sendXmlDoc('createShipment', {mrw, expeditionData}, 'application/soap+xml');
const shipmentId = getTextByTag(shipmentResponse, 'NumeroEnvio');
if (!shipmentId)
throw new UserError(getTextByTag(shipmentResponse, 'Mensaje'));
const getLabelResponse = await sendXmlDoc('getLabel', {mrw, shipmentId}, 'text/xml');
const file = getTextByTag(getLabelResponse, 'EtiquetaFile');
try {
await models.Expedition.updateAll({id: expeditionFk}, {externalId: shipmentId}, myOptions);
if (tx) await tx.commit();
} catch (error) {
if (tx) await tx.rollback();
throw error;
}
return file;
};
function getTextByTag(xmlDoc, tag) {
return xmlDoc?.getElementsByTagName(tag)[0]?.textContent;
}
async function sendXmlDoc(xmlDock, params, contentType) {
const parser = new DOMParser();
const xmlTemplate = fs.readFileSync(__dirname + `/${xmlDock}.ejs`, 'utf-8');
const renderedTemplate = ejs.render(xmlTemplate, params);
const data = await axios.post(params.mrw.url, renderedTemplate, {
headers: {
'Content-Type': `${contentType}; charset=utf-8`
}
});
return parser.parseFromString(data.data, 'text/xml');
}
};
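A hedged usage sketch: the method resolves to the label as a base64 string, so a caller could persist it like this (the output path is illustrative, and the decoded format depends on what MRW returns in EtiquetaFile):

const fs = require('fs');
const {models} = require('vn-loopback/server/server');

async function saveMrwLabel(expeditionFk) {
    const base64Label = await models.MrwConfig.createShipment(expeditionFk);
    // Illustrative destination; a real consumer may stream or print it instead.
    fs.writeFileSync(`/tmp/mrw-label-${expeditionFk}`, Buffer.from(base64Label, 'base64'));
}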

View File

@ -0,0 +1,25 @@
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:mrw="http://www.mrw.es/">
<soapenv:Header>
<mrw:AuthInfo>
<mrw:CodigoFranquicia><%= mrw.franchiseCode %></mrw:CodigoFranquicia>
<mrw:CodigoAbonado><%= mrw.subscriberCode %></mrw:CodigoAbonado>
<mrw:CodigoDepartamento/>
<mrw:UserName><%= mrw.user %></mrw:UserName>
<mrw:Password><%= mrw.password %></mrw:Password>
</mrw:AuthInfo>
</soapenv:Header>
<soapenv:Body>
<mrw:GetEtiquetaEnvio>
<mrw:request>
<mrw:NumeroEnvio><%= shipmentId %></mrw:NumeroEnvio>
<mrw:NumerosEtiqueta>1</mrw:NumerosEtiqueta>
<mrw:SeparadorNumerosEnvio></mrw:SeparadorNumerosEnvio>
<mrw:FechaInicioEnvio></mrw:FechaInicioEnvio>
<mrw:FechaFinEnvio></mrw:FechaFinEnvio>
<mrw:TipoEtiquetaEnvio>0</mrw:TipoEtiquetaEnvio>
<mrw:ReportTopMargin>0</mrw:ReportTopMargin>
<mrw:ReportLeftMargin>0</mrw:ReportLeftMargin>
</mrw:request>
</mrw:GetEtiquetaEnvio>
</soapenv:Body>
</soapenv:Envelope>

View File

@ -0,0 +1,121 @@
const models = require('vn-loopback/server/server').models;
const axios = require('axios');
const fs = require('fs');
const mockBase64Binary = 'base64BinaryString';
const ticket1 = {
'id': '44',
'clientFk': 1101,
'shipped': Date.vnNew(),
'nickname': 'MRW',
'addressFk': 1,
'agencyModeFk': 999
};
const expedition1 = {
'id': 17,
'agencyModeFk': 999,
'ticketFk': 44,
'freightItemFk': 71,
'created': '2001-01-01',
'counter': 1,
'workerFk': 18,
'packagingFk': '94',
'hostFk': '',
'stateTypeFk': 3,
'hasNewRoute': 0,
'isBox': 71,
'editorFk': 100
};
let tx;
let options;
describe('MRWConfig createShipment()', () => {
beforeEach(async() => {
options = tx = undefined;
tx = await models.MrwConfig.beginTransaction({});
options = {transaction: tx};
await models.Agency.create(
{'id': 999, 'name': 'mrw'},
options
);
await models.AgencyMode.create(
{'id': 999, 'name': 'mrw', 'agencyFk': 999, 'code': 'mrw'},
options
);
await models.MrwConfig.create(
{
'id': 1,
'url': 'https://url.com',
'user': 'user',
'password': 'password',
'franchiseCode': 'franchiseCode',
'subscriberCode': 'subscriberCode'
}, options
);
await models.Application.rawSql(
`INSERT INTO vn.mrwService
SET agencyModeCodeFk = 'mrw',
clientType = 1,
serviceType = 1,
kg = 1`, null, options
);
await models.Ticket.create(ticket1, options);
await models.Expedition.create(expedition1, options);
});
afterEach(async() => {
await tx.rollback();
});
it('should create a shipment and return a base64Binary label', async() => {
const mockPostResponses = [
{data: fs.readFileSync(__dirname + '/mockGetLabel.xml', 'utf-8')},
{data: fs.readFileSync(__dirname + '/mockCreateShipment.xml', 'utf-8')}
];
spyOn(axios, 'post').and.callFake(() => Promise.resolve(mockPostResponses.pop()));
const base64Binary = await models.MrwConfig.createShipment(expedition1.id, options);
expect(base64Binary).toEqual(mockBase64Binary);
});
it('should fail if mrwConfig has no data', async() => {
let error;
await models.MrwConfig.createShipment(expedition1.id).catch(e => {
error = e;
}).finally(async() => {
expect(error.message).toEqual(`Some mrwConfig parameters are not set`);
});
expect(error).toBeDefined();
});
it('should fail if expeditionFk is not a MrwExpedition', async() => {
let error;
await models.MrwConfig.createShipment(undefined, options).catch(e => {
error = e;
}).finally(async() => {
expect(error.message).toEqual(`This expedition is not a MRW shipment`);
});
});
it('should fail if the shipped date of this ticket is before the current date', async() => {
let error;
const yesterday = Date.vnNew();
yesterday.setDate(yesterday.getDate() - 1);
await models.Ticket.updateAll({id: ticket1.id}, {shipped: yesterday}, options);
await models.MrwConfig.createShipment(expedition1.id, options).catch(e => {
error = e;
}).finally(async() => {
expect(error.message).toEqual(`This ticket has a shipped date earlier than today`);
});
});
});

View File

@ -0,0 +1,16 @@
<?xml version="1.0" encoding="utf-8"?>
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<soap:Body>
<TransmEnvioResponse xmlns="http://www.mrw.es/">
<TransmEnvioResult>
<Estado>1</Estado>
<Mensaje />
<NumeroSolicitud>1</NumeroSolicitud>
<NumeroEnvio>1</NumeroEnvio>
<Url>http://url.com</Url>
</TransmEnvioResult>
</TransmEnvioResponse>
</soap:Body>
</soap:Envelope>

View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<soap:Envelope xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<soap:Body>
<GetEtiquetaEnvioResponse xmlns="http://www.mrw.es/">
<GetEtiquetaEnvioResult>
<Estado>1</Estado>
<Mensaje />
<EtiquetaFile>base64BinaryString</EtiquetaFile>
</GetEtiquetaEnvioResult>
</GetEtiquetaEnvioResponse>
</soap:Body>
</soap:Envelope>

View File

@ -45,7 +45,6 @@ module.exports = Self => {
});
availableNotificationsMap.delete(active.notificationFk);
}
return {
active: [...activeNotificationsMap.entries()],
available: [...availableNotificationsMap.entries()]

View File

@ -4,10 +4,10 @@ describe('NotificationSubscription getList()', () => {
it('should return a list of available and active notifications of a user', async() => {
const userId = 9;
const {active, available} = await models.NotificationSubscription.getList(userId);
-const notifications = await models.Notification.find({});
-const totalAvailable = notifications.length - active.length;
-expect(active.length).toEqual(2);
+const notifications = await models.NotificationSubscription.getAvailable(userId);
+const totalAvailable = notifications.size - active.length;
+expect(active.length).toEqual(3);
expect(available.length).toEqual(totalAvailable);
});
});

View File

@ -35,11 +35,18 @@ module.exports = Self => {
let html = `<strong>Motivo</strong>:<br/>${reason}<br/>`;
html += `<strong>Usuario</strong>:<br/>${ctx.req.accessToken.userId} ${emailUser.email}<br/>`;
-delete additionalData.backError.config.headers.Authorization;
+const httpRequest = JSON.parse(additionalData?.httpRequest);
+if (httpRequest)
+delete httpRequest.config.headers.Authorization;
+additionalData.httpRequest = httpRequest;
for (const data in additionalData)
html += `<strong>${data}</strong>:<br/>${tryParse(additionalData[data])}<br/>`;
-const subjectReason = JSON.parse(additionalData?.httpRequest)?.data?.error;
-smtp.send({
+const subjectReason = httpRequest?.data?.error;
+await smtp.send({
to: `${config.app.reportEmail}, ${emailUser.email}`,
subject:
'[Support-Salix] ' +

View File

@ -0,0 +1,80 @@
const {ParameterizedSQL} = require('loopback-connector');
const {buildFilter} = require('vn-loopback/util/filter');
module.exports = Self => {
Self.remoteMethod('filter', {
description:
'Find all postcodes of the model matched by postcode, town, province or country.',
accessType: 'READ',
accepts: [
{
arg: 'filter',
type: 'object',
description: 'Filter defining where, order, offset, and limit - must be a JSON-encoded string',
http: {source: 'query'}
},
{
arg: 'search',
type: 'string',
description: 'Value to filter',
http: {source: 'query'}
},
],
returns: {
type: ['object'],
root: true,
},
http: {
path: `/filter`,
verb: 'GET',
},
});
Self.filter = async(ctx, filter, options) => {
const myOptions = {};
if (typeof options == 'object')
Object.assign(myOptions, options);
filter = ctx?.filter ?? {};
const conn = Self.dataSource.connector;
const where = buildFilter(filter?.where, (param, value) => {
switch (param) {
case 'search':
return {
or: [
{'pc.code': {like: `%${value}%`}},
{'t.name': {like: `%${value}%`}},
{'p.name': {like: `%${value}%`}},
{'c.country': {like: `%${value}%`}}
]
};
}
}) ?? {};
delete ctx.filter.where;
const stmts = [];
let stmt;
stmt = new ParameterizedSQL(`
SELECT
pc.townFk,
t.provinceFk,
p.countryFk,
pc.code,
t.name as town,
p.name as province,
c.country
FROM
postCode pc
JOIN town t on t.id = pc.townFk
JOIN province p on p.id = t.provinceFk
JOIN country c on c.id = p.countryFk
`);
stmt.merge(conn.makeSuffix({where, ...ctx}));
const itemsIndex = stmts.push(stmt) - 1;
const sql = ParameterizedSQL.join(stmts, ';');
const result = await conn.executeStmt(sql, myOptions);
return itemsIndex === 0 ? result : result[itemsIndex];
};
};
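A minimal call sketch, matching how the specs below drive it: the single `search` term is matched with LIKE against postcode, town, province and country.

const {models} = require('vn-loopback/server/server');

async function searchPostcodes(term) {
    // e.g. searchPostcodes('Alz') or searchPostcodes(46)
    const ctx = {filter: {where: {search: term}}};
    return models.Postcode.filter(ctx);
}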

View File

@ -0,0 +1,111 @@
const {models} = require('vn-loopback/server/server');
describe('Postcode filter()', () => {
it('should retrieve with no filter', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
},
limit: 1
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(1);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should retrieve with filter as postcode', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
where: {
search: 46,
}
},
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(4);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should retrieve with filter as city', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
where: {
search: 'Alz',
}
},
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(1);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should retrieve with filter as province', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
where: {
search: 'one',
}
},
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(4);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should retrieve with filter as country', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
where: {
search: 'Ec',
}
},
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(1);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
});

View File

@ -19,12 +19,12 @@ module.exports = Self => {
}
});
Self.getUrl = async(appName = 'salix') => {
-const {url} = await Self.app.models.Url.findOne({
+const url = await Self.app.models.Url.findOne({
where: {
appName,
-enviroment: process.env.NODE_ENV || 'development'
+environment: process.env.NODE_ENV || 'development'
}
});
-return url;
+return url?.url;
};
};

View File

@ -20,7 +20,7 @@ module.exports = Self => {
}
});
-Self.internationalExpedition = async expeditionFk => {
+Self.internationalExpedition = async (expeditionFk) => {
const models = Self.app.models;
const viaexpressConfig = await models.ViaexpressConfig.findOne({

View File

@ -20,11 +20,11 @@ module.exports = Self => {
}
});
-Self.renderer = async expeditionFk => {
+Self.renderer = async (expeditionFk) => {
const models = Self.app.models;
const viaexpressConfig = await models.ViaexpressConfig.findOne({
-fields: ['client', 'user', 'password', 'defaultWeight', 'deliveryType']
+fields: ['client', 'user', 'password', 'defaultWeight', 'deliveryType', 'agencyModeFk']
});
const expedition = await models.Expedition.findOne({
@ -34,7 +34,7 @@ module.exports = Self => {
{
relation: 'ticket',
scope: {
-fields: ['shipped', 'addressFk', 'clientFk', 'companyFk'],
+fields: ['shipped', 'addressFk', 'clientFk', 'companyFk', 'agencyModeFk'],
include: [
{
relation: 'client',
@ -102,7 +102,6 @@ module.exports = Self => {
}
]
}
}
]
});
@ -110,13 +109,15 @@ module.exports = Self => {
const ticket = expedition.ticket();
const sender = ticket.company().client();
const shipped = ticket.shipped.toISOString();
+const isInterdia = (ticket.agencyModeFk === viaexpressConfig.agencyModeFk)
const data = {
viaexpressConfig,
sender,
senderAddress: sender.defaultAddress(),
client: ticket.client(),
address: ticket.address(),
-shipped
+shipped,
+isInterdia
};
const template = fs.readFileSync(__dirname + '/template.ejs', 'utf-8');

View File

@ -13,7 +13,7 @@
<Asegurado>0</Asegurado>
<Imprimir>0</Imprimir>
<ConDevolucionAlbaran>0</ConDevolucionAlbaran>
-<Intradia>0</Intradia>
+<Intradia><%= isInterdia %></Intradia>
<Observaciones></Observaciones>
<AlbaranRemitente></AlbaranRemitente>
<Modo>0</Modo>

View File

@ -68,7 +68,7 @@ module.exports = Self => {
userToUpdate.hasGrant = hasGrant;
if (roleFk) {
-const role = await models.Role.findById(roleFk, {fields: ['name']}, myOptions);
+const role = await models.VnRole.findById(roleFk, {fields: ['name']}, myOptions);
const hasRole = await Self.hasRole(userId, role.name, myOptions);
if (!hasRole)

View File

@ -1,4 +1,4 @@
-const UserError = require('vn-loopback/util/user-error');
+const {models} = require('vn-loopback/server/server');
module.exports = Self => {
Self.remoteMethodCtx('renewToken', {
@ -12,26 +12,44 @@ module.exports = Self => {
http: {
path: `/renewToken`,
verb: 'POST'
-}
-});
+},
+accessScopes: ['DEFAULT', 'read:multimedia']});
Self.renewToken = async function(ctx) {
-const models = Self.app.models;
-const token = ctx.req.accessToken;
-const now = new Date();
+const {accessToken: token} = ctx.req;
+// Check if current token is valid
+const {renewPeriod, courtesyTime} = await models.AccessTokenConfig.findOne({
+fields: ['renewPeriod', 'courtesyTime']
+});
+const now = Date.now();
const differenceMilliseconds = now - token.created;
const differenceSeconds = Math.floor(differenceMilliseconds / 1000);
+const isNotExceeded = differenceSeconds < renewPeriod - courtesyTime;
+if (isNotExceeded)
+return token;
-const fields = ['renewPeriod', 'courtesyTime'];
-const accessTokenConfig = await models.AccessTokenConfig.findOne({fields});
-if (differenceSeconds < accessTokenConfig.renewPeriod - accessTokenConfig.courtesyTime)
-throw new UserError(`The renew period has not been exceeded`, 'periodNotExceeded');
-await Self.logout(token.id);
+// Schedule to remove current token
+setTimeout(async() => {
+try {
+const exists = await models.AccessToken.findById(token.id);
+exists && await Self.logout(token.id);
+} catch (err) {
+// eslint-disable-next-line no-console
+console.error(err);
+}
+}, courtesyTime * 1000);
+// Get scopes
+let createTokenOptions = {};
+const {scopes} = token;
+if (scopes)
+createTokenOptions = {scopes: [scopes[0]]};
+// Create new accessToken
const user = await Self.findById(token.userId);
-const accessToken = await user.createAccessToken();
+const accessToken = await user.accessTokens.create(createTokenOptions);
return {id: accessToken.id, ttl: accessToken.ttl};
};
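To make the renewal window concrete, an illustrative calculation (the numbers are examples, not values taken from the fixtures): a token younger than renewPeriod minus courtesyTime is returned unchanged; an older one is replaced and the old id is logged out once the courtesy window elapses.

// Illustrative numbers only; real values come from AccessTokenConfig.
const renewPeriod = 21600; // 6 h, in seconds
const courtesyTime = 60; // in seconds
const tokenAgeSeconds = 22000; // pretend the token was created about 6.1 h ago
const isNotExceeded = tokenAgeSeconds < renewPeriod - courtesyTime;
console.log(isNotExceeded); // false, so a new token is issued and the old one is removed after the courtesy window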

View File

@ -0,0 +1,27 @@
module.exports = Self => {
Self.remoteMethodCtx('shareToken', {
description: 'Returns token to view files or images and share it',
accessType: 'WRITE',
accepts: [],
returns: {
type: 'Object',
root: true
},
http: {
path: `/shareToken`,
verb: 'GET'
}
});
Self.shareToken = async function(ctx) {
const {accessToken: token} = ctx.req;
const user = await Self.findById(token.userId);
const multimediaToken = await user.accessTokens.create({
scopes: ['read:multimedia']
});
return {multimediaToken};
};
};
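A usage sketch of the scoped token; the URL pattern below is an assumption used only to show the idea of passing the token in a link, since only the 'read:multimedia' scope is granted to it:

const {models} = require('vn-loopback/server/server');

async function getShareableUrl(ctx, resourcePath) {
    const {multimediaToken} = await models.VnUser.shareToken(ctx);
    // resourcePath is illustrative; the point is that the returned token id can
    // be appended as access_token without exposing the caller's session token.
    return `${resourcePath}?access_token=${multimediaToken.id}`;
}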

View File

@ -70,7 +70,7 @@ describe('VnUser privileges()', () => {
const tx = await models.VnUser.beginTransaction({});
const options = {transaction: tx};
-const agency = await models.Role.findOne({
+const agency = await models.VnRole.findOne({
where: {
name: 'agency'
}

View File

@ -0,0 +1,64 @@
const {models} = require('vn-loopback/server/server');
describe('Renew Token', () => {
const startingTime = Date.now();
let ctx = null;
beforeAll(async() => {
const unAuthCtx = {
req: {
headers: {},
connection: {
remoteAddress: '127.0.0.1'
},
getLocale: () => 'en'
},
args: {}
};
let login = await models.VnUser.signIn(unAuthCtx, 'salesAssistant', 'nightmare');
let accessToken = await models.AccessToken.findById(login.token);
ctx = {req: {accessToken: accessToken}};
});
beforeEach(() => {
jasmine.clock().install();
jasmine.clock().mockDate(new Date(startingTime));
});
afterEach(() => {
jasmine.clock().uninstall();
});
it('should renew token', async() => {
const {courtesyTime} = await models.AccessTokenConfig.findOne({
fields: ['courtesyTime']
});
const mockDate = new Date(startingTime + 26600000);
jasmine.clock().mockDate(mockDate);
const {id} = await models.VnUser.renewToken(ctx);
expect(id).not.toEqual(ctx.req.accessToken.id);
await models.VnUser.logout(ctx.req.accessToken.id);
jasmine.clock().tick((courtesyTime + 10) * 1000);
let tokenNotExists, error;
try {
tokenNotExists = await models.AccessToken.findById(ctx.req.accessToken.id);
} catch (e) {
error = e;
}
expect(tokenNotExists).toBeNull();
});
it('should not renew the token when the renew period has not been exceeded', async() => {
let error;
let response;
try {
response = await models.VnUser.renewToken(ctx);
} catch (e) {
error = e;
}
expect(error).toBeUndefined();
expect(response.id).toEqual(ctx.req.accessToken.id);
});
});

View File

@ -0,0 +1,64 @@
const {models} = require('vn-loopback/server/server');
const TOKEN_MULTIMEDIA = 'read:multimedia';
describe('Share Token', () => {
let ctx = null;
const startingTime = Date.now();
let multimediaToken = null;
beforeAll(async() => {
const unAuthCtx = {
req: {
headers: {},
connection: {
remoteAddress: '127.0.0.1'
},
getLocale: () => 'en'
},
args: {}
};
let login = await models.VnUser.signIn(unAuthCtx, 'salesAssistant', 'nightmare');
let accessToken = await models.AccessToken.findById(login.token);
ctx = {req: {accessToken: accessToken}};
});
beforeEach(async() => {
multimediaToken = await models.VnUser.shareToken(ctx);
jasmine.clock().install();
jasmine.clock().mockDate(new Date(startingTime));
});
afterEach(() => {
jasmine.clock().uninstall();
});
it('should generate token', async() => {
expect(Object.keys(multimediaToken).length).toEqual(1);
expect(multimediaToken.multimediaToken.userId).toEqual(ctx.req.accessToken.userId);
expect(multimediaToken.multimediaToken.scopes[0]).toEqual(TOKEN_MULTIMEDIA);
});
it('should not renew the token when the renew period has not been exceeded', async() => {
let error;
let response;
try {
response = await models.VnUser.renewToken(ctx);
} catch (e) {
error = e;
}
expect(error).toBeUndefined();
expect(response.id).toEqual(ctx.req.accessToken.id);
});
it('should renew token', async() => {
const mockDate = new Date(startingTime + 26600000);
jasmine.clock().mockDate(mockDate);
const newShareToken = await models.VnUser.renewToken({req: {accessToken: multimediaToken.multimediaToken}});
const {id} = newShareToken;
expect(id).not.toEqual(ctx.req.accessToken.id);
const newMultimediaToken = await models.AccessToken.findById(id);
expect(newMultimediaToken.scopes[0]).toEqual(TOKEN_MULTIMEDIA);
});
});

View File

@ -20,10 +20,7 @@ describe('VnUser Sign-in()', () => {
let ctx = {req: {accessToken: accessToken}};
let signInLog = await SignInLog.find({where: {token: accessToken.id}});
-expect(signInLog.length).toEqual(1);
-expect(signInLog[0].userFk).toEqual(accessToken.userId);
-expect(signInLog[0].owner).toEqual(true);
-expect(login.token).toBeDefined();
+expect(signInLog.length).toEqual(0);
await VnUser.logout(ctx.req.accessToken.id);
});

View File

@ -1,17 +0,0 @@
module.exports = Self => {
Self.remoteMethod('validateToken', {
description: 'Validates the current logged user token',
returns: {
type: 'Boolean',
root: true
},
http: {
path: `/validateToken`,
verb: 'GET'
}
});
Self.validateToken = async function() {
return true;
};
};

View File

@ -13,10 +13,10 @@
"AuthCode": { "AuthCode": {
"dataSource": "vn" "dataSource": "vn"
}, },
"Bank": { "Accounting": {
"dataSource": "vn" "dataSource": "vn"
}, },
"Buyer": { "Buyer": {
"dataSource": "vn" "dataSource": "vn"
}, },
"Campaign": { "Campaign": {
@ -79,15 +79,24 @@
"Language": { "Language": {
"dataSource": "vn" "dataSource": "vn"
}, },
"Machine": {
"dataSource": "vn"
},
"MachineWorker": { "MachineWorker": {
"dataSource": "vn" "dataSource": "vn"
}, },
"MachineWorkerConfig": {
"dataSource": "vn"
},
"MobileAppVersionControl": { "MobileAppVersionControl": {
"dataSource": "vn" "dataSource": "vn"
}, },
"Module": { "Module": {
"dataSource": "vn" "dataSource": "vn"
}, },
"MrwConfig": {
"dataSource": "vn"
},
"Notification": { "Notification": {
"dataSource": "vn" "dataSource": "vn"
}, },
@ -139,9 +148,6 @@
"Warehouse": { "Warehouse": {
"dataSource": "vn" "dataSource": "vn"
}, },
"VnUser": {
"dataSource": "vn"
},
"OsTicket": { "OsTicket": {
"dataSource": "osticket" "dataSource": "osticket"
}, },
@ -156,8 +162,20 @@
}, },
"ViaexpressConfig": { "ViaexpressConfig": {
"dataSource": "vn" "dataSource": "vn"
},
"VnUser": {
"dataSource": "vn"
},
"VnRole": {
"dataSource": "vn"
},
"WorkerActivity": {
"dataSource": "vn"
},
"WorkerActivityType": {
"dataSource": "vn"
},
"ProductionConfig": {
"dataSource": "vn"
} }
} }

View File

@ -1,9 +1,9 @@
{
-"name": "Bank",
+"name": "Accounting",
"base": "VnModel",
"options": {
"mysql": {
-"table": "bank"
+"table": "accounting"
}
},
"properties": {
@ -22,10 +22,7 @@
},
"accountingTypeFk": {
"type": "number",
-"required": true,
-"mysql": {
-"columnName": "cash"
-}
+"required": true
},
"entityFk": {
"type": "number",

View File

@ -8,6 +8,26 @@ module.exports = Self => {
});
Self.validatesUniquenessOf('bic', {
-message: 'This BIC already exist.'
+message: 'This BIC already exist'
});
+Self.validatesPresenceOf('countryFk', {
+message: 'CountryFK cannot be empty'
+});
+Self.validateAsync('bic', checkBic, {
+message: 'Bank entity id must be specified'
+});
+async function checkBic(err, done) {
+const filter = {
+fields: ['code'],
+where: {id: this.countryFk}
+};
+const country = await Self.app.models.Country.findOne(filter);
+const code = country ? country.code.toLowerCase() : null;
+if (code == 'es' && !this.id)
+err();
+done();
+}
};

View File

@ -1,7 +1,8 @@
module.exports = Self => {
require('../methods/collection/getCollection')(Self);
-require('../methods/collection/getSectors')(Self);
require('../methods/collection/setSaleQuantity')(Self);
require('../methods/collection/previousLabel')(Self);
require('../methods/collection/getTickets')(Self);
+require('../methods/collection/assign')(Self);
+require('../methods/collection/getSales')(Self);
};

View File

@ -17,10 +17,6 @@
"type": "string", "type": "string",
"required": true "required": true
}, },
"path": {
"type": "string",
"required": true
},
"code": { "code": {
"type": "string", "type": "string",
"required": true "required": true
@ -29,12 +25,12 @@
"relations": { "relations": {
"readRole": { "readRole": {
"type": "belongsTo", "type": "belongsTo",
"model": "Role", "model": "VnRole",
"foreignKey": "readRoleFk" "foreignKey": "readRoleFk"
}, },
"writeRole": { "writeRole": {
"type": "belongsTo", "type": "belongsTo",
"model": "Role", "model": "VnRole",
"foreignKey": "writeRoleFk" "foreignKey": "writeRoleFk"
} }
}, },

View File

@ -1,4 +1,6 @@
const UserError = require('vn-loopback/util/user-error');
+const fs = require('fs-extra');
+const path = require('path');
module.exports = Self => {
require('../methods/dms/downloadFile')(Self);
@ -35,4 +37,32 @@ module.exports = Self => {
return [stream, dms.contentType, `filename="${dms.file}"`];
};
+Self.getPath = async function(dms) {
+const models = Self.app.models;
+const pathHash = await models.DmsContainer.getHash(dms.id);
+const dmsContainer = await models.DmsContainer.container(pathHash);
+const dstFile = path.join(dmsContainer.client.root, pathHash, dms.file);
+return dstFile;
+};
+Self.createWithExtension = async function(data, extension, options) {
+const newDms = await Self.create(data, options);
+return newDms.updateAttribute('file', `${newDms.id}.${extension}`, options);
+};
+Self.createFromFile = async function(data, extension, srcFile, options) {
+const dms = await Self.createWithExtension(data, extension, options);
+const dstFile = await Self.getPath(dms);
+await fs.move(srcFile, dstFile, {overwrite: true});
+return dms;
+};
+Self.createFromStream = async function(data, extension, stream, options) {
+const dms = await Self.createWithExtension(data, extension, options);
+const dstFile = await Self.getPath(dms);
+const writeStream = await fs.createWriteStream(dstFile);
+await stream.pipe(writeStream);
+return dms;
+};
};
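A short sketch of how the new helpers compose (the sample data object and temp path are illustrative): createFromFile first creates the Dms row, renames it to `<id>.<extension>` via createWithExtension, resolves the storage path from DmsContainer and then moves the upload there.

// Hypothetical caller inside another backend method.
async function archivePdf(models, dmsData, options) {
    // dmsData would carry the usual Dms fields (dmsTypeFk, companyFk, reference, ...).
    const dms = await models.Dms.createFromFile(dmsData, 'pdf', '/tmp/upload.pdf', options);
    return dms.file; // e.g. '1234.pdf'
}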

View File

@ -0,0 +1,17 @@
{
"name": "docuwareTablet",
"base": "VnModel",
"options": {
"mysql": {
"table": "docuwareTablet"
}
},
"properties": {
"tablet": {
"type": "string"
},
"description": {
"type": "string"
}
}
}

View File

@ -46,12 +46,12 @@
},
"readRole": {
"type": "belongsTo",
-"model": "Role",
+"model": "VnRole",
"foreignKey": "readRoleFk"
},
"writeRole": {
"type": "belongsTo",
-"model": "Role",
+"model": "VnRole",
"foreignKey": "writeRoleFk"
}
},
@ -64,4 +64,3 @@
}
]
}

View File

@ -0,0 +1,18 @@
{
"name": "MachineWorkerConfig",
"base": "VnModel",
"options": {
"mysql": {
"table": "vn.machineWorkerConfig"
}
},
"properties": {
"id": {
"type": "number",
"id": true
},
"maxHours": {
"type": "number"
}
}
}

View File

@ -0,0 +1,3 @@
module.exports = Self => {
require('../methods/machine-worker/updateInTime')(Self);
};

back/models/machine.json
View File

@ -0,0 +1,18 @@
{
"name": "Machine",
"base": "VnModel",
"options": {
"mysql": {
"table": "vn.machine"
}
},
"properties": {
"id": {
"type": "number",
"id": true
},
"plate": {
"type": "string"
}
}
}

View File

@ -0,0 +1,3 @@
module.exports = Self => {
require('../methods/mobile-app-version-control/getVersion')(Self);
};

View File

@ -0,0 +1,39 @@
{
"name": "MobileAppVersionControl",
"base": "VnModel",
"options": {
"mysql": {
"table": "vn.mobileAppVersionControl"
}
},
"properties": {
"id": {
"type": "number",
"id": true
},
"appName": {
"type": "string"
},
"version": {
"type": "string"
},
"isVersionCritical": {
"type": "boolean"
},
"urlProduction": {
"type": "string"
},
"urlBeta": {
"type": "string"
},
"versionBeta": {
"type": "string"
},
"isVersionBetaCritical": {
"type": "boolean"
}
}
}

View File

@ -0,0 +1,4 @@
module.exports = Self => {
require('../methods/mrw-config/createShipment')(Self);
require('../methods/mrw-config/cancelShipment')(Self);
};

View File

@ -0,0 +1,32 @@
{
"name": "MrwConfig",
"base": "VnModel",
"options": {
"mysql": {
"table": "mrwConfig"
}
},
"properties": {
"id": {
"type": "number",
"id": true,
"required": true
},
"url": {
"type": "string",
"required": true
},
"user": {
"type": "string"
},
"password": {
"type": "string"
},
"franchiseCode": {
"type": "string"
},
"subscriberCode": {
"type": "string"
}
}
}

View File

@ -24,8 +24,8 @@
},
"role": {
"type": "belongsTo",
-"model": "Role",
+"model": "VnRole",
"foreignKey": "roleFk"
}
}
}

View File

@ -47,7 +47,7 @@
},
"bank": {
"type": "belongsTo",
-"model": "Bank",
+"model": "Accounting",
"foreignKey": "bankFk"
},
"payMethod": {

View File

@ -1,6 +1,7 @@
let UserError = require('vn-loopback/util/user-error');
module.exports = Self => {
+require('../methods/postcode/filter.js')(Self);
Self.rewriteDbError(function(err) {
if (err.code === 'ER_DUP_ENTRY')
return new UserError(`This postcode already exists`);

View File

@ -0,0 +1,19 @@
{
"name": "ProductionConfig",
"base": "VnModel",
"options": {
"mysql": {
"table": "productionConfig"
}
},
"properties": {
"id": {
"type": "number",
"required": true,
"id": true
},
"backupPrinterNotificationDelay": {
"type": "string"
}
}
}

View File

@ -0,0 +1,56 @@
const models = require('vn-loopback/server/server').models;
describe('loopback model MailAliasAccount', () => {
it('should add a mail Alias', async() => {
const tx = await models.MailAliasAccount.beginTransaction({});
let error;
try {
const options = {transaction: tx, accessToken: {userId: 9}};
await models.MailAliasAccount.create({mailAlias: 2, account: 5}, options);
await tx.rollback();
} catch (e) {
await tx.rollback();
error = e;
}
expect(error).toBeUndefined();
});
it('should add a mail Alias of an inherit role', async() => {
const tx = await models.MailAliasAccount.beginTransaction({});
let error;
try {
const options = {transaction: tx, accessToken: {userId: 9}};
await models.MailAliasAccount.create({mailAlias: 3, account: 5}, options);
await tx.rollback();
} catch (e) {
await tx.rollback();
error = e;
}
expect(error).toBeUndefined();
});
it('should delete a mail Alias', async() => {
const tx = await models.MailAliasAccount.beginTransaction({});
let error;
try {
const options = {transaction: tx, accessToken: {userId: 1}};
const mailAclId = 2;
await models.MailAliasAccount.destroyAll({id: mailAclId}, options);
await tx.rollback();
} catch (e) {
await tx.rollback();
error = e;
}
expect(error).toBeUndefined();
});
});

View File

@ -41,8 +41,7 @@ describe('loopback model NotificationSubscription', () => {
try {
const options = {transaction: tx, accessToken: {userId: 9}};
-const notificationSubscriptionId = 2;
-await models.NotificationSubscription.destroyAll({id: notificationSubscriptionId}, options);
+await models.NotificationSubscription.destroyAll({id: 2}, options);
await tx.rollback();
} catch (e) {
@ -76,8 +75,7 @@ describe('loopback model NotificationSubscription', () => {
try {
const options = {transaction: tx, accessToken: {userId: 9}};
-const notificationSubscriptionId = 6;
-await models.NotificationSubscription.destroyAll({id: notificationSubscriptionId}, options);
+await models.NotificationSubscription.destroyAll({id: 6}, options);
await tx.rollback();
} catch (e) {
@ -111,8 +109,7 @@ describe('loopback model NotificationSubscription', () => {
try {
const options = {transaction: tx, accessToken: {userId: 19}};
-const notificationSubscriptionId = 4;
-await models.NotificationSubscription.destroyAll({id: notificationSubscriptionId}, options);
+await models.NotificationSubscription.destroyAll({id: 4}, options);
await tx.rollback();
} catch (e) {

View File

@ -26,6 +26,9 @@
},
"darkMode": {
"type": "boolean"
+},
+"tabletFk": {
+"type": "string"
}
},
"relations": {
@ -43,6 +46,11 @@
"type": "belongsTo",
"model": "VnUser",
"foreignKey": "userFk"
-}
+},
+"Tablet": {
+"type": "belongsTo",
+"model": "docuwareTablet",
+"foreignKey": "tabletFk"
+}
}
}

View File

@ -29,6 +29,9 @@
},
"deliveryType": {
"type": "string"
+},
+"agencyModeFk": {
+"type": "number"
}
}
}

back/models/vn-role.json
View File

@ -0,0 +1,13 @@
{
"name": "VnRole",
"base": "Role",
"validateUpsert": true,
"options": {
"mysql": {
"table": "account.role"
}
},
"mixins": {
"Loggable": true
}
}

View File

@ -10,10 +10,10 @@ module.exports = function(Self) {
require('../methods/vn-user/sign-in')(Self);
require('../methods/vn-user/acl')(Self);
require('../methods/vn-user/recover-password')(Self);
-require('../methods/vn-user/validate-token')(Self);
require('../methods/vn-user/privileges')(Self);
require('../methods/vn-user/validate-auth')(Self);
require('../methods/vn-user/renew-token')(Self);
+require('../methods/vn-user/share-token')(Self);
require('../methods/vn-user/update-user')(Self);
Self.definition.settings.acls = Self.definition.settings.acls.filter(acl => acl.property !== 'create');
@ -80,10 +80,10 @@ module.exports = function(Self) {
Self.getRoles = async(userId, options) => {
const result = await Self.rawSql(
`SELECT r.name
FROM account.user u
JOIN account.roleRole rr ON rr.role = u.role
JOIN account.role r ON r.id = rr.inheritsFrom
WHERE u.id = ?`, [userId], options);
const roles = [];
for (const role of result)
@ -135,15 +135,16 @@ module.exports = function(Self) {
Self.signInValidate = async(user, userToken, token, ctx) => {
const [[key, value]] = Object.entries(Self.userUses(user));
const isOwner = Self.rawSql(`SELECT ? = ? `, [userToken[key], value]);
-await Self.app.models.SignInLog.create({
-userName: user,
-token: token.id,
-userFk: userToken.id,
-ip: ctx.req.ip,
-owner: isOwner
-});
-if (!isOwner)
-throw new UserError('Try again');
+if (!isOwner) {
+await Self.app.models.SignInLog.create({
+userName: user,
+token: token.id,
+userFk: userToken.id,
+ip: ctx.req.ip,
+owner: isOwner
+});
+throw new UserError('Try again');
+}
};
/**
@ -258,18 +259,20 @@ module.exports = function(Self) {
class Mailer {
async send(verifyOptions, cb) {
-const url = new URL(verifyOptions.verifyHref);
-if (process.env.NODE_ENV) url.port = '';
-const params = {
-url: url.href,
-recipient: verifyOptions.to
-};
-const email = new Email('email-verify', params);
-email.send();
-cb(null, verifyOptions.to);
+try {
+const url = new URL(verifyOptions.verifyHref);
+if (process.env.NODE_ENV) url.port = '';
+const email = new Email('email-verify', {
+url: url.href,
+recipient: verifyOptions.to
+});
+await email.send();
+cb(null, verifyOptions.to);
+} catch (err) {
+cb(err);
+}
}
}

View File

@ -1,100 +1,103 @@
{
"name": "VnUser",
"base": "User",
"validateUpsert": true,
"options": {
"mysql": {
"table": "account.user"
}
},
+"mixins": {
+"Loggable": true
+},
"resetPasswordTokenTTL": "604800",
"properties": {
"id": {
"type": "number",
"id": true
},
"name": {
"type": "string",
"required": true
},
"username": {
"type": "string"
},
"roleFk": {
"type": "number",
"mysql": {
"columnName": "role"
}
},
"nickname": {
"type": "string"
},
"lang": {
"type": "string"
},
"active": {
"type": "boolean"
},
"email": {
"type": "string"
},
"emailVerified": {
"type": "boolean"
},
"created": {
"type": "date"
},
"updated": {
"type": "date"
},
"image": {
"type": "string"
},
"hasGrant": {
"type": "boolean"
},
"passExpired": {
"type": "date"
},
"twoFactor": {
"type": "string"
}
},
"relations": {
"role": {
"type": "belongsTo",
-"model": "Role",
+"model": "VnRole",
"foreignKey": "roleFk"
},
"roles": {
"type": "hasMany",
"model": "RoleRole",
"foreignKey": "role",
"primaryKey": "roleFk"
},
"emailUser": {
"type": "hasOne",
"model": "EmailUser",
"foreignKey": "userFk"
},
"worker": {
"type": "hasOne",
"model": "Worker",
"foreignKey": "id"
},
"userConfig": {
"type": "hasOne",
"model": "UserConfig",
"foreignKey": "userFk"
}
},
"acls": [
{
"property": "signIn",
"accessType": "EXECUTE",
"principalType": "ROLE",
"principalId": "$everyone",
"permission": "ALLOW"
},
{
"property": "recoverPassword",
@ -103,28 +106,35 @@
"principalId": "$everyone",
"permission": "ALLOW"
},
{
-"property": "validateToken",
-"accessType": "EXECUTE",
-"principalType": "ROLE",
-"principalId": "$authenticated",
-"permission": "ALLOW"
-},
-{
"property": "validateAuth",
"accessType": "EXECUTE",
"principalType": "ROLE",
"principalId": "$everyone",
"permission": "ALLOW"
},
{
"property": "privileges",
"accessType": "*",
"principalType": "ROLE",
"principalId": "$authenticated",
"permission": "ALLOW"
-}
-],
+},
+{
+"property": "renewToken",
+"accessType": "WRITE",
+"principalType": "ROLE",
+"principalId": "$authenticated",
+"permission": "ALLOW"
+},
+{
+"property": "shareToken",
+"accessType": "WRITE",
+"principalType": "ROLE",
+"principalId": "$authenticated",
+"permission": "ALLOW"
+}
+],
"scopes": {
"preview": {
"fields": [
@ -141,7 +151,7 @@
"hasGrant",
"realm",
"email",
"emailVerified"
]
}
}

View File

@ -0,0 +1,39 @@
{
"name": "WorkerActivity",
"base": "VnModel",
"options": {
"mysql": {
"table": "workerActivity"
}
},
"properties": {
"id": {
"id": true,
"type": "number"
},
"created": {
"type": "date"
},
"model": {
"type": "string"
},
"event": {
"type": "string"
},
"description": {
"type": "string"
}
},
"relations": {
"workerFk": {
"type": "belongsTo",
"model": "Worker",
"foreignKey": "workerFk"
},
"workerActivityTypeFk": {
"type": "belongsTo",
"model": "WorkerActivityType",
"foreignKey": "workerActivityTypeFk"
}
}
}

View File

@ -0,0 +1,19 @@
{
"name": "WorkerActivityType",
"base": "VnModel",
"options": {
"mysql": {
"table": "workerActivityType"
}
},
"properties": {
"code": {
"id": true,
"type": "string"
},
"description": {
"type": "string",
"required": false
}
}
}

back/tests-helper.js
View File

@ -0,0 +1,34 @@
/* eslint-disable no-console */
const app = require('vn-loopback/server/server');
let dataSources = require('../loopback/server/datasources.json');
async function init() {
console.log('Initializing backend.');
dataSources = JSON.parse(JSON.stringify(dataSources));
Object.assign(dataSources.vn, {
host: process.env.DB_HOST,
port: process.env.DB_PORT
});
const bootOptions = {dataSources};
await new Promise((resolve, reject) => {
app.boot(bootOptions,
err => err ? reject(err) : resolve());
});
// FIXME: Workaround to wait for loopback to be ready
await app.models.Application.status();
}
async function deinit() {
console.log('Stopping backend.');
await app.disconnect();
}
module.exports = {
init,
deinit
};
if (require.main === module)
init();

View File

@ -1,80 +1,123 @@
-const Docker = require('../db/docker.js');
-let dataSources = require('../loopback/server/datasources.json');
+/* eslint-disable no-console */
+const path = require('path');
+const getopts = require('getopts');
+const Myt = require('@verdnatura/myt/myt');
+const Run = require('@verdnatura/myt/myt-run');
+const helper = require('./tests-helper');
-process.on('warning', warning => {
-console.log(warning.name);
-console.log(warning.message);
-console.log(warning.stack);
+const opts = getopts(process.argv.slice(2), {
+string: [
+'network'
+],
+boolean: [
+'ci',
+'junit'
+]
});
-process.on('exit', async function() {
-if (container) await container.rm();
-});
+let server;
+const PARALLEL = false;
+const SETUP_TIMEOUT = 15 * 60 * 1000;
+const SPEC_TIMEOUT = 30 * 1000;
+process.on('exit', teardown);
+process.on('uncaughtException', onError);
+process.on('unhandledRejection', onError);
+const exitSignals = [
+'SIGINT',
+'SIGUSR1',
+'SIGUSR2'
+];
+for (const signal of exitSignals)
+process.on(signal, () => process.exit());
+async function setup() {
+console.log('Building and running DB container.');
+const myt = new Myt();
+await myt.init({
+workspace: path.join(__dirname, '..'),
+random: true,
+ci: opts.ci,
+tmpfs: process.platform == 'linux',
+network: opts.network || null
+});
+server = await myt.run(Run);
+await myt.deinit();
+const {dbConfig} = server;
+process.env.DB_HOST = dbConfig.host;
+process.env.DB_PORT = dbConfig.port;
+if (!PARALLEL)
+await helper.init();
+}
+async function teardown() {
+if (!server) return;
+const oldServer = server;
+server = null;
+if (!PARALLEL)
+await helper.deinit();
+console.log('Stopping and removing DB container.');
+await oldServer.rm();
+}
+async function onError(err) {
+console.error(err);
+process.exit(1);
+}
-let container;
async function test() {
-let isCI = false;
-if (process.argv[2] === 'ci')
-isCI = true;
-container = new Docker();
-await container.run(isCI);
-dataSources = JSON.parse(JSON.stringify(dataSources));
-Object.assign(dataSources.vn, {
-host: container.dbConf.host,
-port: container.dbConf.port
-});
-const bootOptions = {dataSources};
-const app = require('vn-loopback/server/server');
-await new Promise((resolve, reject) => {
-app.boot(bootOptions,
-err => err ? reject(err) : resolve());
-});
-// FIXME: Workaround to wait for loopback to be ready
-await app.models.Application.status();
-const Jasmine = require('jasmine');
-const jasmine = new Jasmine();
-const SpecReporter = require('jasmine-spec-reporter').SpecReporter;
-jasmine.addReporter(new SpecReporter({
-spec: {
-displaySuccessful: isCI,
-displayPending: isCI
-},
-summary: {
-displayPending: false,
-}
-}));
-if (isCI) {
-const JunitReporter = require('jasmine-reporters');
-jasmine.addReporter(new JunitReporter.JUnitXmlReporter());
-jasmine.jasmine.DEFAULT_TIMEOUT_INTERVAL = 90000;
-jasmine.exitOnCompletion = true;
-}
-const backSpecs = [
-'./back/**/*[sS]pec.js',
-'./loopback/**/*[sS]pec.js',
-'./modules/*/back/**/*.[sS]pec.js'
-];
-jasmine.loadConfig({
-spec_dir: '.',
-spec_files: backSpecs,
-helpers: [],
-});
-await jasmine.execute();
-if (app) await app.disconnect();
-if (container) await container.rm();
-console.log('App disconnected & container removed');
+let runner;
+const config = {
+globalSetup: setup,
+globalSetupTimeout: SETUP_TIMEOUT,
+globalTeardown: teardown,
+globalTeardownTimeout: SETUP_TIMEOUT,
+spec_dir: '.',
+spec_files: [
+'back/**/*[sS]pec.js',
+'loopback/**/*[sS]pec.js',
+'modules/*/back/**/*.[sS]pec.js'
+],
+helpers: []
+};
+if (PARALLEL) {
+const ParallelRunner = require('jasmine/parallel');
+runner = new ParallelRunner({numWorkers: 1});
+config.helpers.push(`back/tests-helper.js`);
+} else {
+const Jasmine = require('jasmine');
+runner = new Jasmine();
+const SpecReporter = require('jasmine-spec-reporter').SpecReporter;
+runner.addReporter(new SpecReporter({
+spec: {
+displaySuccessful: opts.ci,
+displayPending: opts.ci
+},
+summary: {
+displayPending: false,
+}
+}));
+}
+if (opts.junit) {
+const JunitReporter = require('jasmine-reporters');
+runner.addReporter(new JunitReporter.JUnitXmlReporter());
+}
+if (opts.ci)
+runner.jasmine.DEFAULT_TIMEOUT_INTERVAL = SPEC_TIMEOUT;
+// runner.loadConfigFile('back/jasmine.json');
+runner.loadConfig(config);
+process.env.SPEC_IS_RUNNING = true;
+await runner.execute();
}
test();

commitlint.config.js
View File

@ -0,0 +1 @@
module.exports = {extends: ['@commitlint/config-conventional']};

View File

@ -1,49 +0,0 @@
INSERT INTO `salix`.`ACL` (model, property, accessType, permission, principalType, principalId)
VALUES
('ClientConsumptionQueue', '*', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'deliveryNotePdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'deliveryNoteEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'deliveryNoteCsvPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'deliveryNoteCsvEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'campaignMetricsPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'campaignMetricsEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientWelcomeHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientWelcomeEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'creditRequestPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'creditRequestHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'creditRequestEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'printerSetupHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'printerSetupEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'sepaCoreEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorStHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorStEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorNdHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorNdEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientDebtStatementPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientDebtStatementHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientDebtStatementEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'incotermsAuthorizationPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'incotermsAuthorizationHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'incotermsAuthorizationEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'consumptionSendQueued', 'WRITE', 'ALLOW', 'ROLE', 'system'),
('InvoiceOut', 'invoiceEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('InvoiceOut', 'exportationPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('InvoiceOut', 'sendQueued', 'WRITE', 'ALLOW', 'ROLE', 'system'),
('Ticket', 'invoiceCsvPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'invoiceCsvEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Supplier', 'campaignMetricsPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Supplier', 'campaignMetricsEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Travel', 'extraCommunityPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Travel', 'extraCommunityEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Entry', 'entryOrderPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('OsTicket', 'osTicketReportEmail', 'WRITE', 'ALLOW', 'ROLE', 'system'),
('Item', 'buyerWasteEmail', 'WRITE', 'ALLOW', 'ROLE', 'system'),
('Claim', 'claimPickupPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Claim', 'claimPickupEmail', 'WRITE', 'ALLOW', 'ROLE', 'claimManager'),
('Item', 'labelPdf', 'READ', 'ALLOW', 'ROLE', 'employee');
INSERT INTO `salix`.`ACL` (model,property,accessType,permission,principalType,principalId)
VALUES ('Sector','*','READ','ALLOW','ROLE','employee');
INSERT INTO `salix`.`ACL` (model,property,accessType,permission,principalType,principalId)
VALUES ('Sector','*','WRITE','ALLOW','ROLE','employee');
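For context only (not part of the removed migration), a minimal read-back sketch of how these rows are typically consulted; the model/property pair comes from the inserts above, and the query uses only the columns shown:
-- Which principals may render the delivery-note PDF on Ticket?
SELECT principalType, principalId, accessType, permission
    FROM `salix`.`ACL`
    WHERE model = 'Ticket' AND property = 'deliveryNotePdf';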

View File

@ -1,9 +0,0 @@
create table `vn`.`clientConsumptionQueue`
(
id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
params json not null,
queued datetime default current_timestamp() not null,
printed datetime null,
status varchar(50) default '' null
)
comment 'Queue for client consumption PDF mailing';
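As an illustration only (not part of the removed migration), a queue row can be inserted with just its JSON payload, letting `queued` and `status` take their defaults; the parameter names inside the JSON are hypothetical:
-- Hypothetical enqueue of a consumption report; `printed` stays NULL until it is processed.
INSERT INTO `vn`.`clientConsumptionQueue` (params)
    VALUES ('{"clientId": 1101, "from": "2023-01-01", "to": "2023-12-31"}');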

View File

@ -1 +0,0 @@
rename table `vn`.`invoiceOut_queue` to `vn`.`invoiceOutQueue`;

View File

@ -1,5 +0,0 @@
ALTER TABLE `vn`.`itemConfig`
ADD id int null PRIMARY KEY first;
ALTER TABLE `vn`.`itemConfig`
ADD wasteRecipients VARCHAR(50) NOT NULL comment 'Weekly waste report schedule recipients';

View File

@ -1,10 +0,0 @@
create table `salix`.`printConfig`
(
id int auto_increment,
itRecipient varchar(50) null comment 'IT recipients for report mailing',
incidencesEmail varchar(50) null comment 'CAU destinatary email',
constraint printConfig_pk
primary key (id)
)
comment 'Print service config';

View File

@ -1,6 +0,0 @@
alter table `vn`.`sample`
add model VARCHAR(25) null comment 'Model name in plural';
UPDATE vn.sample t
SET t.model = 'Clients'
WHERE t.id IN(12, 13, 14, 15, 16, 18, 19, 20);

View File

@ -1 +0,0 @@
ALTER TABLE `account`.`user` ADD hasGrant TINYINT(1) NOT NULL;

View File

@ -1,2 +0,0 @@
INSERT INTO `salix`.`ACL` (model,property,accessType,permission,principalId)
VALUES ('WorkerDisableExcluded','*','*','ALLOW','hr');

View File

@ -1,3 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Business', '*', '*', 'ALLOW', 'ROLE', 'hr');

View File

@ -1,3 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Sale', 'usesMana', '*', 'ALLOW', 'ROLE', 'employee');

View File

@ -1,2 +0,0 @@
INSERT INTO `vn`.`payDem` (id,payDem)
VALUES (7,'0');

View File

@ -1,4 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('InvoiceIn', 'invoiceInPdf', 'READ', 'ALLOW', 'ROLE', 'administrative'),
('InvoiceIn', 'invoiceInEmail', 'WRITE', 'ALLOW', 'ROLE', 'administrative');

View File

@ -1,2 +0,0 @@
INSERT INTO `salix`.`ACL` (model,property,accessType,principalId)
VALUES ('Supplier','newSupplier','WRITE','administrative');

View File

@ -1,63 +0,0 @@
USE util;
CREATE TABLE notification(
id INT PRIMARY KEY,
`name` VARCHAR(255) UNIQUE,
`description` VARCHAR(255)
);
CREATE TABLE notificationAcl(
notificationFk INT,
roleFk INT(10) unsigned,
PRIMARY KEY(notificationFk, roleFk)
);
ALTER TABLE `util`.`notificationAcl` ADD CONSTRAINT `notificationAcl_ibfk_1` FOREIGN KEY (`notificationFk`) REFERENCES `util`.`notification` (`id`)
ON DELETE CASCADE
ON UPDATE CASCADE;
ALTER TABLE `util`.`notificationAcl` ADD CONSTRAINT `notificationAcl_ibfk_2` FOREIGN KEY (`roleFk`) REFERENCES `account`.`role`(`id`)
ON DELETE RESTRICT
ON UPDATE CASCADE;
CREATE TABLE notificationSubscription(
notificationFk INT,
userFk INT(10) unsigned,
PRIMARY KEY(notificationFk, userFk)
);
ALTER TABLE `util`.`notificationSubscription` ADD CONSTRAINT `notificationSubscription_ibfk_1` FOREIGN KEY (`notificationFk`) REFERENCES `util`.`notification` (`id`)
ON DELETE CASCADE
ON UPDATE CASCADE;
ALTER TABLE `util`.`notificationSubscription` ADD CONSTRAINT `notificationSubscription_ibfk_2` FOREIGN KEY (`userFk`) REFERENCES `account`.`user`(`id`)
ON DELETE CASCADE
ON UPDATE CASCADE;
CREATE TABLE notificationQueue(
id INT PRIMARY KEY AUTO_INCREMENT,
notificationFk VARCHAR(255),
params JSON,
authorFk INT(10) unsigned NULL,
`status` ENUM('pending', 'sent', 'error') NOT NULL DEFAULT 'pending',
created DATETIME DEFAULT CURRENT_TIMESTAMP,
INDEX(notificationFk),
INDEX(authorFk),
INDEX(status)
);
ALTER TABLE `util`.`notificationQueue` ADD CONSTRAINT `nnotificationQueue_ibfk_1` FOREIGN KEY (`notificationFk`) REFERENCES `util`.`notification` (`name`)
ON DELETE CASCADE
ON UPDATE CASCADE;
ALTER TABLE `util`.`notificationQueue` ADD CONSTRAINT `notificationQueue_ibfk_2` FOREIGN KEY (`authorFk`) REFERENCES `account`.`user`(`id`)
ON DELETE CASCADE
ON UPDATE CASCADE;
CREATE TABLE notificationConfig(
id INT PRIMARY KEY AUTO_INCREMENT,
cleanDays MEDIUMINT
);
INSERT INTO notificationConfig
SET cleanDays = 90;
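A hypothetical end-to-end sketch (not part of the removed migration) using only the columns defined above: register a notification, subscribe a user to it, then queue a send, with `status` and `created` falling back to their defaults; the notification name, user id and JSON params are invented for illustration:
-- 1. Declare the notification type.
INSERT INTO util.notification (id, `name`, `description`)
    VALUES (1, 'report-ready', 'A generated report is ready to be sent');
-- 2. Subscribe a user (notificationFk references notification.id).
INSERT INTO util.notificationSubscription (notificationFk, userFk)
    VALUES (1, 9);
-- 3. Queue a send (notificationFk here references notification.name).
INSERT INTO util.notificationQueue (notificationFk, params, authorFk)
    VALUES ('report-ready', '{"reportId": 123}', 9);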

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`supplier` MODIFY COLUMN payMethodFk tinyint(3) unsigned NULL;

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`supplier` MODIFY COLUMN supplierActivityFk varchar(45) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci DEFAULT NULL NULL;

Some files were not shown because too many files have changed in this diff.