WIP: 6367-blankNotification #1903

Draft
pablone wants to merge 11 commits from 6367-blankNotification into dev
2956 changed files with 131065 additions and 78876 deletions
Showing only changes of commit 84c2c65850

View File

@ -1,4 +1,6 @@
node_modules
print/node_modules
front/node_modules
services
front
db
e2e
storage

View File

@ -16,6 +16,7 @@
},
"cSpell.words": [
"salix",
"fdescribe"
"fdescribe",
"Loggable"
]
}

View File

@ -5,59 +5,94 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [2408.01] - 2024-02-22
### Added
### Changed
### Fixed
## [2406.01] - 2024-02-08
### Added
### Changed
### Fixed
## [2404.01] - 2024-01-25
### Added
### Changed
### Fixed
## [2402.01] - 2024-01-11
### Added
### Changed
### Fixed
## [2400.01] - 2024-01-04
### Added
### Changed
### Fixed
## [2350.01] - 2023-12-14
### Added 🆕
- **Tickets → Expeditions:** Added support for Viaexpress
## [2348.01] - 2023-11-30
### Added 🆕
- **Tickets → Advance:** Allows moving lines without generating negatives
- **Tickets → Advance:** Allows modifying the ticket date
- **Workers → Notifications:** New section (lilium)
### Fixed 🛠️
- **Tickets → RocketChat:** Fixed change detection
## [2346.01] - 2023-11-16
### Added
### Changed
### Fixed
## [2342.01] - 2023-11-02
### Added
- (Users -> Photo) The worker's photo is displayed
### Fixed
- (Users -> History) Opens the user descriptor correctly
## [2340.01] - 2023-10-05
## [2338.01] - 2023-09-21
### Added
- (Ticket -> Services) Services can be refunded
- (Invoices -> Basic data) Shows default values
- (Invoices -> Deletion) Notification when deleting an accounting entry already linked in Sage
### Changed
- (Workers -> Calendar) Check icon fixed when clicking a day type
## [2336.01] - 2023-09-07
@ -65,41 +100,45 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [2334.01] - 2023-08-24
### Added
- (General -> Errors) Button to send a CAU with the error data
## [2332.01] - 2023-08-10
### Added
- (Workers -> Document management) Support for Docuware
- (General -> Agency) Support for Viaexpress
- (Tickets -> SMS) New section in Lilium
### Changed
- (General -> Tickets) Returns the reason why it is not editable
- (Dropdowns -> Workers) Improved
- (General -> Clients) Business name and address in uppercase
### Fixed
- (Clients -> SMS) Hovering over it shows the full message
## [2330.01] - 2023-07-27
### Added
- (Items -> Preview) Added "Recycled plastic" field
- (Routes -> Trunk routes) New section
- (Tickets -> Options) Option to set weight
- (Clients -> SMS) New section
### Changed
- (General -> Icons) Added new icons
- (Clients -> Business name) Allows creating clients with the same business name depending on the country
## [2328.01] - 2023-07-13
### Added
- (Clients -> Defaulters) Added "is worker" column
- (Workers -> Departments) New section
- (Workers -> Departments) Added list of workers per department
@ -108,28 +147,32 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Changed
### Fixed
- (Workers -> Departments) Fixed search
## [2326.01] - 2023-06-29
### Added
- (Entries -> Email) When the exchange rate is changed, an email will be sent to the designated people
- (General -> Logs) Button to see the state of the record at each point
- (General -> Logs) When filtering by record, the full history since it was created is shown
- (Tickets -> Index) Allows sending several delivery notes to Docuware
### Changed
- (General -> Logs) Records are shown grouped by user and entity
- (Invoices -> Global invoicing) Optimized; PDF generation and notifications run in parallel
### Fixed
- (General -> Logs) Duplicates removed
- (Invoices -> Global invoicing) Fixed failures that stopped the process
## [2324.01] - 2023-06-15
### Added
- (Tickets -> Refund) When refunding, allows creating the refund ticket with or without a warehouse
- (General -> Dropdowns) Improved data loading efficiency
- (General -> Logs) Now, in addition to the ids, the attribute descriptions are shown
@ -137,77 +180,84 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- (General -> Logs) Filter by changes
### Changed
- (General -> Permissions) Improved security
- (General -> Logs) Interface elements reorganized to make it more agile and intuitive
### Fixed
-
## [2322.01] - 2023-06-01
### Added
- (Tickets -> Create Invoice) When invoicing, the PDF is automatically sent to the client
- (Items -> History) Filter to show records prior to the inventory
- (Workers -> New worker) Allows choosing the payment method
### Changed
- (Workers -> New worker) Clients are created without 'TR' but the business type 'Trabajador' is added
- (Tickets -> Expeditions) Improved interface and counter added
### Fixed
- (Tickets -> Lines) Lines can be split to the same ticket
- (Tickets -> Change state) Now shows the full list of all states
## [2320.01] - 2023-05-25
### Added
- (Tickets -> Create Invoice) When invoicing, the PDF is automatically sent to the client
### Changed
- (Workers -> New worker) Clients are created without 'TR' but the business type 'Trabajador' is added
### Fixed
-
## [2318.01] - 2023-05-08
### Added
- (Users -> History) New section
- (Roles -> History) New section
- (Workers -> Hiring) Allows choosing the payment method
### Changed
- (Item -> Fixed price) Replaced the top search bar with a side one
- (Workers -> Hiring) IBAN is no longer mandatory
### Fixed
- (Ticket -> Boxing) Fixed hour selection
- (Basket -> Index) Optimized search
## [2314.01] - 2023-04-20
### Added
- (Clients -> Defaulters) Can now be filtered by the "Desde" and "Fecha Ú. O." columns. An email is also sent to the sales rep when a note is added.
- (Ticket monitor) Shows an icon next to the zone if the ticket is fragile and is sent by agency
- (Received invoices -> Negative bases) New section
### Fixed
- (Clients -> Defaulters) Selected items are now kept when scrolling.
## [2312.01] - 2023-04-06
### Added
- (Ticket monitor) Shows an icon next to the zone if the ticket is fragile and is sent by agency
### Changed
- (Ticket monitor) When filtering by 'Pendiente' it no longer shows the 'Previa' states
- (Shipments -> Extra-community) Entries of the same travel are grouped. Added Reference and Amount fields.
- (Shipments -> Index) Replaced the top search bar with a side one
@ -215,33 +265,40 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [2310.01] - 2023-03-23
### Added
- (Workers -> Time control) Each week's time records can now be confirmed/unconfirmed from this section
### Fixed
- (Clients -> Extended list) Fixed error when filtering by inactive clients from the "Activo" column
- (General) Hovering over the "Delete" icon of a field made it grow larger, affecting the interface
## [2308.01] - 2023-03-09
### Added
- (Suppliers -> Fiscal data) Added 'Vies' checkbox
- (Client -> Descriptor) New $-behind-bars icon for clients with unpaid debt
- (Worker -> Basic data) Added new Locker field
- (Worker -> PDA) New section
### Changed
- (Ticket -> Delete ticket) Restricted deletion of tickets with a refund
## [2306.01] - 2023-02-23
### Added
- (Tickets -> Basic data) Confirmation message when trying to generate tickets with negatives
- (Items) Visible and available are calculated from a different warehouse depending on the section you are in. An icon has been added showing which warehouse the calculation is based on.
### Changed
- (General -> Login) Now allows recovering the password with either the recovery email or the username
### Fixed
- (Ticket monitor) When sorting by column, the 'Refresh' button no longer stays disabled
- (Zone -> Delivery days) Clicking on a day correctly shows the zones
- (Items) The available quantity in the preview is shown correctly
@ -249,12 +306,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [2304.01] - 2023-02-09
### Added
- (Routes) When downloading several invoices they are compressed into a zip
- (Workers -> New worker) New section
- (Tickets -> Advance tickets) Added "lines" and "liters" fields to the origin ticket
- (Tickets -> Advance tickets) New icon shows when the agencies of the origin/destination tickets differ
### Changed
- (Entries -> Purchases) Changed the "Precio Grouping/Packing" fields to "PVP" and "Precio" to "Coste"
- (Items -> Latest entries) Changed the "P.P.U." and "P.P.P." fields to "PVP"
- (Routes -> Summary/Tickets) Updated ticket fields
@ -263,6 +322,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- (Tickets -> Advance tickets) Changed stock from destination to origin.
### Fixed
- (Items -> Tags) Allows swapping the relevance of two tags.
- (Client -> Fiscal data) 'Notify via e-mail' cannot be selected for clients without e-mail
- (Tickets -> Basic data) Allows saving the shipping time
@ -273,17 +333,21 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [2302.01] - 2023-01-26
### Added
- (General -> Login) Allows recovering the password
- (Tickets -> Options) Upload delivery note to Docuware
- (Tickets -> Options) Send email with Docuware PDF
- (Items -> Basic data) Added Units/Box field
### Changed
- (Claims -> Descriptor) Changed the Agency field to Zone
- (Tickets -> Prepared lines) Updated section to be more visual
### Fixed
- (General) Icons became misaligned when using Google Translate
### Removed
- (Tickets -> Client control) Section removed

View File

@ -13,7 +13,7 @@ RUN apt-get update \
graphicsmagick \
&& curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
&& apt-get install -y --no-install-recommends nodejs \
&& npm install -g npm@9.6.6
&& corepack enable pnpm
# Puppeteer
@ -39,12 +39,12 @@ RUN apt-get update \
WORKDIR /salix
COPY print/package.json print/package-lock.json print/
RUN npm --prefix ./print install --omit=dev ./print
COPY print/package.json print/pnpm-lock.yaml print/
RUN pnpm install --prod --prefix=print
COPY package.json package-lock.json ./
COPY package.json pnpm-lock.yaml ./
COPY loopback/package.json loopback/
RUN npm install --omit=dev
RUN pnpm install --prod
COPY loopback loopback
COPY back back

Jenkinsfile vendored
View File

@ -1,144 +1,253 @@
#!/usr/bin/env groovy
def PROTECTED_BRANCH
def FROM_GIT
def RUN_TESTS
def RUN_BUILD
def BRANCH_ENV = [
test: 'test',
master: 'production'
]
node {
stage('Setup') {
env.BACK_REPLICAS = 1
env.NODE_ENV = BRANCH_ENV[env.BRANCH_NAME] ?: 'dev'
PROTECTED_BRANCH = [
'dev',
'test',
'master'
].contains(env.BRANCH_NAME)
FROM_GIT = env.JOB_NAME.startsWith('gitea/')
RUN_TESTS = !PROTECTED_BRANCH && FROM_GIT
RUN_BUILD = PROTECTED_BRANCH && FROM_GIT
// https://www.jenkins.io/doc/book/pipeline/jenkinsfile/#using-environment-variables
echo "NODE_NAME: ${env.NODE_NAME}"
echo "WORKSPACE: ${env.WORKSPACE}"
configFileProvider([
configFile(fileId: 'salix.properties',
variable: 'PROPS_FILE')
]) {
def props = readProperties file: PROPS_FILE
props.each {key, value -> env."${key}" = value }
props.each {key, value -> echo "${key}: ${value}" }
}
if (PROTECTED_BRANCH) {
configFileProvider([
configFile(fileId: "salix.branch.${env.BRANCH_NAME}",
variable: 'BRANCH_PROPS_FILE')
]) {
def props = readProperties file: BRANCH_PROPS_FILE
props.each {key, value -> env."${key}" = value }
props.each {key, value -> echo "${key}: ${value}" }
}
}
}
}
pipeline {
agent any
options {
disableConcurrentBuilds()
}
tools {
nodejs 'node-v20'
}
environment {
PROJECT_NAME = 'salix'
STACK_NAME = "${env.PROJECT_NAME}-${env.BRANCH_NAME}"
}
stages {
stage('Checkout') {
steps {
script {
switch (env.BRANCH_NAME) {
case 'master':
env.NODE_ENV = 'production'
env.BACK_REPLICAS = 4
break
case 'test':
env.NODE_ENV = 'test'
env.BACK_REPLICAS = 2
break
}
}
configFileProvider([
configFile(fileId: "salix.groovy",
variable: 'GROOVY_FILE')
]) {
load env.GROOVY_FILE
}
setEnv()
}
}
stage('Install') {
environment {
NODE_ENV = ""
}
steps {
nodejs('node-v20') {
sh 'npm install --no-audit --prefer-offline'
sh 'gulp install --ci'
}
}
}
stage('Test') {
when { not { anyOf {
branch 'test'
branch 'master'
}}}
environment {
NODE_ENV = ""
TZ = 'Europe/Madrid'
NODE_ENV = ''
}
parallel {
stage('Frontend') {
stage('Back') {
steps {
nodejs('node-v20') {
sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=2'
sh 'pnpm install --prefer-offline'
}
}
stage('Print') {
when {
expression { FROM_GIT }
}
steps {
sh 'pnpm install --prefer-offline --prefix=print'
}
}
stage('Front') {
when {
expression { FROM_GIT }
}
steps {
sh 'pnpm install --prefer-offline --prefix=front'
}
}
}
}
stage('Stack') {
parallel {
stage('Back') {
stages {
stage('Test') {
when {
expression { RUN_TESTS }
}
environment {
NODE_ENV = ''
}
stage('Backend') {
steps {
nodejs('node-v20') {
sh 'npm run test:back:ci'
}
}
post {
always {
junit(
testResults: 'junitresults.xml',
allowEmptyResults: true
)
}
}
}
stage('Build') {
when { anyOf {
branch 'test'
branch 'master'
}}
when {
expression { RUN_BUILD }
}
steps {
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
sh 'docker-compose build back'
}
}
}
}
stage('Front') {
when {
expression { FROM_GIT }
}
stages {
stage('Test') {
when {
expression { RUN_TESTS }
}
environment {
NODE_ENV = ''
}
steps {
sh 'jest --ci --reporters=default --reporters=jest-junit --maxWorkers=10'
}
post {
always {
junit(
testResults: 'junit.xml',
allowEmptyResults: true
)
}
}
}
stage('Build') {
when {
expression { RUN_BUILD }
}
steps {
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
sh 'gulp build'
sh 'docker-compose build front'
}
}
}
}
}
}
stage('Push') {
when {
expression { RUN_BUILD }
}
environment {
CREDENTIALS = credentials('docker-registry')
}
steps {
nodejs('node-v20') {
sh 'gulp build'
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
dockerBuild()
sh 'docker login --username $CREDENTIALS_USR --password $CREDENTIALS_PSW $REGISTRY'
sh 'docker-compose push'
}
}
stage('Deploy') {
when { anyOf {
branch 'test'
branch 'master'
}}
environment {
DOCKER_HOST = "${env.SWARM_HOST}"
}
steps {
sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
}
when {
expression { PROTECTED_BRANCH }
}
parallel {
stage('Database') {
when { anyOf {
branch 'test'
branch 'master'
}}
steps {
configFileProvider([
configFile(fileId: "config.${env.NODE_ENV}.ini",
variable: 'MYSQL_CONFIG')
]) {
sh 'cp "$MYSQL_CONFIG" db/config.$NODE_ENV.ini'
sh 'mkdir -p db/remotes'
sh 'cp "$MYSQL_CONFIG" db/remotes/$NODE_ENV.ini'
}
sh 'db/import-changes.sh -f $NODE_ENV'
sh 'npx myt push $NODE_ENV --force --commit'
}
}
stage('Docker') {
when {
expression { FROM_GIT }
}
environment {
DOCKER_HOST = "${env.SWARM_HOST}"
}
steps {
script {
def packageJson = readJSON file: 'package.json'
env.VERSION = packageJson.version
}
sh "docker stack deploy --with-registry-auth --compose-file docker-compose.yml ${env.STACK_NAME}"
}
}
}
}
}
post {
always {
success {
script {
if (!['master', 'test'].contains(env.BRANCH_NAME)) {
try {
junit 'junitresults.xml'
junit 'junit.xml'
} catch (e) {
echo e.toString()
}
}
if (env.BRANCH_NAME == 'master' && FROM_GIT) {
env.GIT_COMMIT_MSG = sh(
script: 'git log -1 --pretty=%B ${GIT_COMMIT}',
returnStdout: true
).trim()
if (!env.COMMITTER_EMAIL || currentBuild.currentResult == 'SUCCESS') return;
try {
mail(
to: env.COMMITTER_EMAIL,
subject: "Pipeline: ${env.JOB_NAME} (${env.BUILD_NUMBER}): ${currentBuild.currentResult}",
body: "Check status at ${env.BUILD_URL}"
String message = env.GIT_COMMIT_MSG
int index = message.indexOf('\n')
if (index != -1)
message = message.substring(0, index)
rocketSend(
channel: 'vn-database',
message: "*DB version uploaded:* ${message}"
+"\n$COMMITTER_EMAIL ($BRANCH_NAME)"
+"\n$GIT_URL/commit/$GIT_COMMIT",
rawMessage: true
)
} catch (e) {
echo e.toString()
}
}
}
unsuccessful {
setEnv()
sendEmail()
}
}
}

View File

@ -31,7 +31,7 @@ describe('docuware upload()', () => {
try {
const options = {transaction: tx};
const user = await models.UserConfig.findById(userId, null, options);
await user.updateAttribute('tabletFk', 'Tablet1');
await user.updateAttribute('tabletFk', 'Tablet1', options);
await models.Docuware.upload(ctx, ticketIds, fileCabinetName, options);
await tx.rollback();

View File

@ -95,10 +95,7 @@ describe('image upload()', () => {
spyOn(containerModel, 'upload');
const ctx = {req: {accessToken: {userId: hhrrId}},
args: {
id: itemId,
collection: 'user'
}
args: {id: itemId, collection: 'user'}
};
try {
@ -109,7 +106,7 @@ describe('image upload()', () => {
});
it('should try to upload a file for the collection "catalog" and throw a privilege error', async() => {
const ctx = {req: {accessToken: {userId: hhrrId}},
const ctx = {req: {accessToken: {userId: 1}},
args: {
id: workerId,
collection: 'catalog'

View File

@ -35,10 +35,17 @@ module.exports = Self => {
let html = `<strong>Motivo</strong>:<br/>${reason}<br/>`;
html += `<strong>Usuario</strong>:<br/>${ctx.req.accessToken.userId} ${emailUser.email}<br/>`;
delete additionalData.backError.config.headers.Authorization;
const httpRequest = JSON.parse(additionalData?.httpRequest);
if (httpRequest)
delete httpRequest.config.headers.Authorization;
additionalData.httpRequest = httpRequest;
for (const data in additionalData)
html += `<strong>${data}</strong>:<br/>${tryParse(additionalData[data])}<br/>`;
const subjectReason = JSON.parse(additionalData?.httpRequest)?.data?.error;
const subjectReason = httpRequest?.data?.error;
smtp.send({
to: `${config.app.reportEmail}, ${emailUser.email}`,
subject:

View File

@ -0,0 +1,80 @@
const {ParameterizedSQL} = require('loopback-connector');
const {buildFilter} = require('vn-loopback/util/filter');
module.exports = Self => {
Self.remoteMethod('filter', {
description:
'Find all postcodes of the model matched by postcode, town, province or country.',
accessType: 'READ',
accepts: [
{
arg: 'filter',
type: 'object',
description: 'Filter defining where, order, offset, and limit - must be a JSON-encoded string',
http: {source: 'query'}
},
{
arg: 'search',
type: 'string',
description: 'Value to filter',
http: {source: 'query'}
},
],
returns: {
type: ['object'],
root: true,
},
http: {
path: `/filter`,
verb: 'GET',
},
});
Self.filter = async(ctx, filter, options) => {
const myOptions = {};
if (typeof options == 'object')
Object.assign(myOptions, options);
filter = ctx?.filter ?? {};
const conn = Self.dataSource.connector;
const where = buildFilter(filter?.where, (param, value) => {
switch (param) {
case 'search':
return {
or: [
{'pc.code': {like: `%${value}%`}},
{'t.name': {like: `%${value}%`}},
{'p.name': {like: `%${value}%`}},
{'c.country': {like: `%${value}%`}}
]
};
}
}) ?? {};
delete ctx.filter.where;
const stmts = [];
let stmt;
stmt = new ParameterizedSQL(`
SELECT
pc.townFk,
t.provinceFk,
p.countryFk,
pc.code,
t.name as town,
p.name as province,
c.country
FROM
postCode pc
JOIN town t on t.id = pc.townFk
JOIN province p on p.id = t.provinceFk
JOIN country c on c.id = p.countryFk
`);
stmt.merge(conn.makeSuffix({where, ...ctx}));
const itemsIndex = stmts.push(stmt) - 1;
const sql = ParameterizedSQL.join(stmts, ';');
const result = await conn.executeStmt(sql, myOptions);
return itemsIndex === 0 ? result : result[itemsIndex];
};
};
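A minimal sketch of how the new filter method is driven, mirroring the spec below: the LoopBack-style filter travels in the first argument and `search` is matched against postcode, town, province and country.

```js
// Minimal sketch, mirroring the spec below: `search` is matched against
// postcode code, town name, province name and country name.
const {models} = require('vn-loopback/server/server');

async function searchPostcodes(search) {
    const ctx = {filter: {where: {search}}};
    // Returns rows with {code, town, province, country, townFk, provinceFk, countryFk}
    return models.Postcode.filter(ctx);
}
```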

View File

@ -0,0 +1,111 @@
const {models} = require('vn-loopback/server/server');
describe('Postcode filter()', () => {
it('should retrieve with no filter', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
},
limit: 1
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(1);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should retrieve with filter as postcode', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
where: {
search: 46,
}
},
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(4);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should retrieve with filter as city', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
where: {
search: 'Alz',
}
},
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(1);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should retrieve with filter as province', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
where: {
search: 'one',
}
},
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(4);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
it('should retrieve with filter as country', async() => {
const tx = await models.Postcode.beginTransaction({});
const options = {transaction: tx};
try {
const ctx = {
filter: {
where: {
search: 'Ec',
}
},
};
const results = await models.Postcode.filter(ctx, options);
expect(results.length).toEqual(1);
await tx.rollback();
} catch (e) {
await tx.rollback();
throw e;
}
});
});

View File

@ -20,7 +20,7 @@ module.exports = Self => {
}
});
Self.internationalExpedition = async expeditionFk => {
Self.internationalExpedition = async (expeditionFk) => {
const models = Self.app.models;
const viaexpressConfig = await models.ViaexpressConfig.findOne({

View File

@ -20,11 +20,11 @@ module.exports = Self => {
}
});
Self.renderer = async expeditionFk => {
Self.renderer = async (expeditionFk) => {
const models = Self.app.models;
const viaexpressConfig = await models.ViaexpressConfig.findOne({
fields: ['client', 'user', 'password', 'defaultWeight', 'deliveryType']
fields: ['client', 'user', 'password', 'defaultWeight', 'deliveryType', 'agencyModeFk']
});
const expedition = await models.Expedition.findOne({
@ -34,7 +34,7 @@ module.exports = Self => {
{
relation: 'ticket',
scope: {
fields: ['shipped', 'addressFk', 'clientFk', 'companyFk'],
fields: ['shipped', 'addressFk', 'clientFk', 'companyFk', 'agencyModeFk'],
include: [
{
relation: 'client',
@ -102,7 +102,6 @@ module.exports = Self => {
}
]
}
}
]
});
@ -110,13 +109,15 @@ module.exports = Self => {
const ticket = expedition.ticket();
const sender = ticket.company().client();
const shipped = ticket.shipped.toISOString();
const isInterdia = (ticket.agencyModeFk === viaexpressConfig.agencyModeFk)
const data = {
viaexpressConfig,
sender,
senderAddress: sender.defaultAddress(),
client: ticket.client(),
address: ticket.address(),
shipped
shipped,
isInterdia
};
const template = fs.readFileSync(__dirname + '/template.ejs', 'utf-8');

View File

@ -13,7 +13,7 @@
<Asegurado>0</Asegurado>
<Imprimir>0</Imprimir>
<ConDevolucionAlbaran>0</ConDevolucionAlbaran>
<Intradia>0</Intradia>
<Intradia><%= isInterdia %></Intradia>
<Observaciones></Observaciones>
<AlbaranRemitente></AlbaranRemitente>
<Modo>0</Modo>

View File

@ -68,7 +68,7 @@ module.exports = Self => {
userToUpdate.hasGrant = hasGrant;
if (roleFk) {
const role = await models.Role.findById(roleFk, {fields: ['name']}, myOptions);
const role = await models.VnRole.findById(roleFk, {fields: ['name']}, myOptions);
const hasRole = await Self.hasRole(userId, role.name, myOptions);
if (!hasRole)

View File

@ -1,14 +1,5 @@
const UserError = require('vn-loopback/util/user-error');
const {models} = require('vn-loopback/server/server');
const handlePromiseLogout = (Self, {id}, courtesyTime) => {
new Promise(res => {
setTimeout(() => {
res(Self.logout(id));
}
, courtesyTime * 1000);
});
};
module.exports = Self => {
Self.remoteMethodCtx('renewToken', {
description: 'Checks if the token has more than renewPeriod seconds to live and if so, renews it',
@ -28,14 +19,26 @@ module.exports = Self => {
const {accessToken: token} = ctx.req;
// Check if current token is valid
const isValid = await validateToken(token);
if (isValid)
const {renewPeriod, courtesyTime} = await models.AccessTokenConfig.findOne({
fields: ['renewPeriod', 'courtesyTime']
});
const now = Date.now();
const differenceMilliseconds = now - token.created;
const differenceSeconds = Math.floor(differenceMilliseconds / 1000);
const isNotExceeded = differenceSeconds < renewPeriod - courtesyTime;
if (isNotExceeded)
return token;
const {courtesyTime} = await models.AccessTokenConfig.findOne({fields: ['courtesyTime']});
// Schedule to remove current token
handlePromiseLogout(Self, token, courtesyTime);
setTimeout(async() => {
try {
await Self.logout(token.id);
} catch (err) {
// eslint-disable-next-line no-console
console.error(err);
}
}, courtesyTime * 1000);
// Create new accessToken
const user = await Self.findById(token.userId);
@ -43,14 +46,4 @@ module.exports = Self => {
return {id: accessToken.id, ttl: accessToken.ttl};
};
async function validateToken(token) {
const accessTokenConfig = await models.AccessTokenConfig.findOne({fields: ['renewPeriod', 'courtesyTime']});
const now = Date.now();
const differenceMilliseconds = now - token.created;
const differenceSeconds = Math.floor(differenceMilliseconds / 1000);
const isValid = differenceSeconds < accessTokenConfig.renewPeriod - accessTokenConfig.courtesyTime;
return isValid;
}
};
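A small sketch of the renewal window check now inlined in renewToken (the old validateToken helper was removed); renewPeriod and courtesyTime below are invented example values, the real ones come from AccessTokenConfig.

```js
// Sketch only: renewPeriod/courtesyTime are invented example values (seconds);
// the real ones are read from AccessTokenConfig inside renewToken.
const renewPeriod = 21600;
const courtesyTime = 60;

function needsRenewal(tokenCreated, now = Date.now()) {
    const ageSeconds = Math.floor((now - tokenCreated) / 1000);
    // The token is kept while younger than renewPeriod - courtesyTime; otherwise a new
    // token is issued and the old one is logged out after courtesyTime seconds.
    return ageSeconds >= renewPeriod - courtesyTime;
}
```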

View File

@ -70,7 +70,7 @@ describe('VnUser privileges()', () => {
const tx = await models.VnUser.beginTransaction({});
const options = {transaction: tx};
const agency = await models.Role.findOne({
const agency = await models.VnRole.findOne({
where: {
name: 'agency'
}

View File

@ -30,7 +30,6 @@ describe('Renew Token', () => {
it('should renew token', async() => {
const mockDate = new Date(startingTime + 26600000);
jasmine.clock().mockDate(mockDate);
console.log(startingTime, mockDate)
const {id} = await models.VnUser.renewToken(ctx);
expect(id).not.toEqual(ctx.req.accessToken.id);

View File

@ -139,9 +139,6 @@
"Warehouse": {
"dataSource": "vn"
},
"VnUser": {
"dataSource": "vn"
},
"OsTicket": {
"dataSource": "osticket"
},
@ -156,6 +153,12 @@
},
"ViaexpressConfig": {
"dataSource": "vn"
},
"VnUser": {
"dataSource": "vn"
},
"VnRole": {
"dataSource": "vn"
}
}

View File

@ -8,6 +8,26 @@ module.exports = Self => {
});
Self.validatesUniquenessOf('bic', {
message: 'This BIC already exist.'
message: 'This BIC already exist'
});
Self.validatesPresenceOf('countryFk', {
message: 'CountryFK cannot be empty'
});
Self.validateAsync('bic', checkBic, {
message: 'Bank entity id must be specified'
});
async function checkBic(err, done) {
const filter = {
fields: ['code'],
where: {id: this.countryFk}
};
const country = await Self.app.models.Country.findOne(filter);
const code = country ? country.code.toLowerCase() : null;
if (code == 'es' && !this.id)
err();
done();
}
};
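An illustrative sketch of what the new checkBic rule enforces; the BankEntity model name and the fixture values are assumptions, not taken from this diff. For a country whose code is 'es', the bank entity id becomes mandatory.

```js
// Illustrative only: the BankEntity model name and the values used here are assumed.
const {models} = require('vn-loopback/server/server');

async function createSpanishBankEntity() {
    // checkBic() calls err() when the country code is 'es' and no id is given,
    // raising 'Bank entity id must be specified'; supplying the id passes validation.
    return models.BankEntity.create({
        id: 2100,            // national bank entity code, required when country code is 'es'
        bic: 'CAIXESBBXXX',
        name: 'CaixaBank',
        countryFk: 1         // assumed to reference Spain
    });
}
```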

View File

@ -17,10 +17,6 @@
"type": "string",
"required": true
},
"path": {
"type": "string",
"required": true
},
"code": {
"type": "string",
"required": true
@ -29,12 +25,12 @@
"relations": {
"readRole": {
"type": "belongsTo",
"model": "Role",
"model": "VnRole",
"foreignKey": "readRoleFk"
},
"writeRole": {
"type": "belongsTo",
"model": "Role",
"model": "VnRole",
"foreignKey": "writeRoleFk"
}
},

View File

@ -46,12 +46,12 @@
},
"readRole": {
"type": "belongsTo",
"model": "Role",
"model": "VnRole",
"foreignKey": "readRoleFk"
},
"writeRole": {
"type": "belongsTo",
"model": "Role",
"model": "VnRole",
"foreignKey": "writeRoleFk"
}
},
@ -64,4 +64,3 @@
}
]
}

View File

@ -24,7 +24,7 @@
},
"role": {
"type": "belongsTo",
"model": "Role",
"model": "VnRole",
"foreignKey": "roleFk"
}
}

View File

@ -1,6 +1,7 @@
let UserError = require('vn-loopback/util/user-error');
module.exports = Self => {
require('../methods/postcode/filter.js')(Self);
Self.rewriteDbError(function(err) {
if (err.code === 'ER_DUP_ENTRY')
return new UserError(`This postcode already exists`);

View File

@ -0,0 +1,73 @@
const models = require('vn-loopback/server/server').models;
describe('loopback model MailAliasAccount', () => {
it('should fail to add a mail Alias if the worker doesnt have ACLs', async() => {
const tx = await models.MailAliasAccount.beginTransaction({});
let error;
try {
const options = {transaction: tx, accessToken: {userId: 57}};
await models.MailAliasAccount.create({mailAlias: 2, account: 5}, options);
await tx.rollback();
} catch (e) {
await tx.rollback();
error = e;
}
expect(error.message).toEqual('The alias cant be modified');
});
it('should add a mail Alias', async() => {
const tx = await models.MailAliasAccount.beginTransaction({});
let error;
try {
const options = {transaction: tx, accessToken: {userId: 9}};
await models.MailAliasAccount.create({mailAlias: 2, account: 5}, options);
await tx.rollback();
} catch (e) {
await tx.rollback();
error = e;
}
expect(error).toBeUndefined();
});
it('should add a mail Alias of an inherit role', async() => {
const tx = await models.MailAliasAccount.beginTransaction({});
let error;
try {
const options = {transaction: tx, accessToken: {userId: 9}};
await models.MailAliasAccount.create({mailAlias: 3, account: 5}, options);
await tx.rollback();
} catch (e) {
await tx.rollback();
error = e;
}
expect(error).toBeUndefined();
});
it('should delete a mail Alias', async() => {
const tx = await models.MailAliasAccount.beginTransaction({});
let error;
try {
const options = {transaction: tx, accessToken: {userId: 1}};
const mailAclId = 2;
await models.MailAliasAccount.destroyAll({id: mailAclId}, options);
await tx.rollback();
} catch (e) {
await tx.rollback();
error = e;
}
expect(error).toBeUndefined();
});
});

View File

@ -29,6 +29,9 @@
},
"deliveryType": {
"type": "string"
},
"agencyModeFk": {
"type": "number"
}
}
}

back/models/vn-role.json Normal file
View File

@ -0,0 +1,13 @@
{
"name": "VnRole",
"base": "Role",
"validateUpsert": true,
"options": {
"mysql": {
"table": "account.role"
}
},
"mixins": {
"Loggable": true
}
}
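VnRole wraps the built-in Role model (same account.role table) and adds the Loggable mixin, which is why the relation and ACL files above switch from Role to VnRole. A minimal usage sketch, matching the updated VnUser privileges() spec:

```js
// Minimal sketch: VnRole is queried exactly like the old Role model,
// but changes to it are now audited through the Loggable mixin.
const {models} = require('vn-loopback/server/server');

async function getAgencyRole() {
    return models.VnRole.findOne({where: {name: 'agency'}});
}
```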

View File

@ -258,18 +258,20 @@ module.exports = function(Self) {
class Mailer {
async send(verifyOptions, cb) {
try {
const url = new URL(verifyOptions.verifyHref);
if (process.env.NODE_ENV) url.port = '';
const params = {
const email = new Email('email-verify', {
url: url.href,
recipient: verifyOptions.to
};
const email = new Email('email-verify', params);
email.send();
});
await email.send();
cb(null, verifyOptions.to);
} catch (err) {
cb(err);
}
}
}

View File

@ -7,6 +7,9 @@
"table": "account.user"
}
},
"mixins": {
"Loggable": true
},
"resetPasswordTokenTTL": "604800",
"properties": {
"id": {
@ -63,7 +66,7 @@
"relations": {
"role": {
"type": "belongsTo",
"model": "Role",
"model": "VnRole",
"foreignKey": "roleFk"
},
"roles": {
@ -95,27 +98,30 @@
"principalType": "ROLE",
"principalId": "$everyone",
"permission": "ALLOW"
},
{
}, {
"property": "recoverPassword",
"accessType": "EXECUTE",
"principalType": "ROLE",
"principalId": "$everyone",
"permission": "ALLOW"
},
{
}, {
"property": "validateAuth",
"accessType": "EXECUTE",
"principalType": "ROLE",
"principalId": "$everyone",
"permission": "ALLOW"
},
{
}, {
"property": "privileges",
"accessType": "*",
"principalType": "ROLE",
"principalId": "$authenticated",
"permission": "ALLOW"
}, {
"property": "renewToken",
"accessType": "WRITE",
"principalType": "ROLE",
"principalId": "$authenticated",
"permission": "ALLOW"
}
],
"scopes": {

back/tests-helper.js Normal file
View File

@ -0,0 +1,34 @@
/* eslint-disable no-console */
const app = require('vn-loopback/server/server');
let dataSources = require('../loopback/server/datasources.json');
async function init() {
console.log('Initializing backend.');
dataSources = JSON.parse(JSON.stringify(dataSources));
Object.assign(dataSources.vn, {
host: process.env.DB_HOST,
port: process.env.DB_PORT
});
const bootOptions = {dataSources};
await new Promise((resolve, reject) => {
app.boot(bootOptions,
err => err ? reject(err) : resolve());
});
// FIXME: Workaround to wait for loopback to be ready
await app.models.Application.status();
}
async function deinit() {
console.log('Stopping backend.');
await app.disconnect();
}
module.exports = {
init,
deinit
};
if (require.main === module)
init();

View File

@ -1,84 +1,121 @@
const Docker = require('../db/docker.js');
let dataSources = require('../loopback/server/datasources.json');
/* eslint-disable no-console */
const path = require('path');
const getopts = require('getopts');
const Myt = require('@verdnatura/myt/myt');
const Run = require('@verdnatura/myt/myt-run');
const helper = require('./tests-helper');
process.on('warning', warning => {
console.log(warning.name);
console.log(warning.message);
console.log(warning.stack);
const opts = getopts(process.argv.slice(2), {
string: [
'network'
],
boolean: [
'ci',
'junit'
]
});
process.on('SIGUSR2', async() => {
if (container) await container.rm();
});
let server;
const PARALLEL = false;
const TIMEOUT = 900000;
process.on('exit', async function() {
if (container) await container.rm();
});
process.on('exit', teardown);
process.on('uncaughtException', onError);
process.on('unhandledRejection', onError);
const exitSignals = [
'SIGINT',
'SIGUSR1',
'SIGUSR2'
];
for (const signal of exitSignals)
process.on(signal, () => process.exit());
async function setup() {
console.log('Building and running DB container.');
const myt = new Myt();
await myt.init({
workspace: path.join(__dirname, '..'),
random: true,
ci: opts.ci,
tmpfs: process.platform == 'linux',
network: opts.network || null
});
server = await myt.run(Run);
await myt.deinit();
const {dbConfig} = server;
process.env.DB_HOST = dbConfig.host;
process.env.DB_PORT = dbConfig.port;
if (!PARALLEL)
await helper.init();
}
async function teardown() {
if (!server) return;
const oldServer = server;
server = null;
if (!PARALLEL)
await helper.deinit();
console.log('Stopping and removing DB container.');
await oldServer.rm();
}
async function onError(err) {
console.error(err);
process.exit(1);
}
let container;
async function test() {
let isCI = false;
if (process.argv[2] === 'ci')
isCI = true;
container = new Docker();
await container.run(isCI);
dataSources = JSON.parse(JSON.stringify(dataSources));
Object.assign(dataSources.vn, {
host: container.dbConf.host,
port: container.dbConf.port
});
const bootOptions = {dataSources};
const app = require('vn-loopback/server/server');
await new Promise((resolve, reject) => {
app.boot(bootOptions,
err => err ? reject(err) : resolve());
});
// FIXME: Workaround to wait for loopback to be ready
await app.models.Application.status();
let runner;
const config = {
globalSetup: setup,
globalSetupTimeout: TIMEOUT,
globalTeardown: teardown,
globalTeardownTimeout: TIMEOUT,
spec_dir: '.',
spec_files: [
'back/**/*[sS]pec.js',
'loopback/**/*[sS]pec.js',
'modules/*/back/**/*.[sS]pec.js'
],
helpers: []
};
if (PARALLEL) {
const ParallelRunner = require('jasmine/parallel');
runner = new ParallelRunner({numWorkers: 1});
config.helpers.push(`back/tests-helper.js`);
} else {
const Jasmine = require('jasmine');
const jasmine = new Jasmine();
runner = new Jasmine();
const SpecReporter = require('jasmine-spec-reporter').SpecReporter;
jasmine.addReporter(new SpecReporter({
runner.addReporter(new SpecReporter({
spec: {
displaySuccessful: isCI,
displayPending: isCI
displaySuccessful: opts.ci,
displayPending: opts.ci
},
summary: {
displayPending: false,
}
}));
if (isCI) {
const JunitReporter = require('jasmine-reporters');
jasmine.addReporter(new JunitReporter.JUnitXmlReporter());
jasmine.jasmine.DEFAULT_TIMEOUT_INTERVAL = 90000;
jasmine.exitOnCompletion = true;
}
const backSpecs = [
'./back/**/*[sS]pec.js',
'./loopback/**/*[sS]pec.js',
'./modules/*/back/**/*.[sS]pec.js'
];
if (opts.junit) {
const JunitReporter = require('jasmine-reporters');
runner.addReporter(new JunitReporter.JUnitXmlReporter());
}
if (opts.ci)
runner.jasmine.DEFAULT_TIMEOUT_INTERVAL = TIMEOUT;
jasmine.loadConfig({
spec_dir: '.',
spec_files: backSpecs,
helpers: [],
});
await jasmine.execute();
if (app) await app.disconnect();
if (container) await container.rm();
console.log('App disconnected & container removed');
// runner.loadConfigFile('back/jasmine.json');
runner.loadConfig(config);
await runner.execute();
}
test();
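The rewritten runner is driven by getopts flags instead of a positional ci argument and boots the test database through @verdnatura/myt. A hedged sketch of typical invocations; the npm script wiring is not part of this diff and the network name is only an example.

```js
// Typical invocations of the new runner (flags as declared in the getopts config above):
//   node back/tests.js                       -> local run with the spec reporter
//   node back/tests.js --ci --junit          -> CI run: CI reporter verbosity plus JUnit XML output
//   node back/tests.js --network=my_network  -> attach the DB container to an existing Docker network
```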

View File

@ -1,49 +0,0 @@
INSERT INTO `salix`.`ACL` (model, property, accessType, permission, principalType, principalId)
VALUES
('ClientConsumptionQueue', '*', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'deliveryNotePdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'deliveryNoteEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'deliveryNoteCsvPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'deliveryNoteCsvEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'campaignMetricsPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'campaignMetricsEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientWelcomeHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientWelcomeEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'creditRequestPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'creditRequestHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'creditRequestEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'printerSetupHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'printerSetupEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'sepaCoreEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorStHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorStEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorNdHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'letterDebtorNdEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientDebtStatementPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientDebtStatementHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'clientDebtStatementEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'incotermsAuthorizationPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'incotermsAuthorizationHtml', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Client', 'incotermsAuthorizationEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Client', 'consumptionSendQueued', 'WRITE', 'ALLOW', 'ROLE', 'system'),
('InvoiceOut', 'invoiceEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('InvoiceOut', 'exportationPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('InvoiceOut', 'sendQueued', 'WRITE', 'ALLOW', 'ROLE', 'system'),
('Ticket', 'invoiceCsvPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'invoiceCsvEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Supplier', 'campaignMetricsPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Supplier', 'campaignMetricsEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Travel', 'extraCommunityPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Travel', 'extraCommunityEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Entry', 'entryOrderPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('OsTicket', 'osTicketReportEmail', 'WRITE', 'ALLOW', 'ROLE', 'system'),
('Item', 'buyerWasteEmail', 'WRITE', 'ALLOW', 'ROLE', 'system'),
('Claim', 'claimPickupPdf', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Claim', 'claimPickupEmail', 'WRITE', 'ALLOW', 'ROLE', 'claimManager'),
('Item', 'labelPdf', 'READ', 'ALLOW', 'ROLE', 'employee');
INSERT INTO `salix`.`ACL` (model,property,accessType,permission,principalType,principalId)
VALUES ('Sector','*','READ','ALLOW','ROLE','employee');
INSERT INTO `salix`.`ACL` (model,property,accessType,permission,principalType,principalId)
VALUES ('Sector','*','WRITE','ALLOW','ROLE','employee');

View File

@ -1,3 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Receipt', 'receiptPdf', '*', 'ALLOW', 'ROLE', 'salesAssistant');

View File

@ -1,9 +0,0 @@
create table `vn`.`clientConsumptionQueue`
(
id INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
params json not null,
queued datetime default current_timestamp() not null,
printed datetime null,
status varchar(50) default '' null
)
comment 'Queue for client consumption PDF mailing';

View File

@ -1 +0,0 @@
rename table `vn`.`invoiceOut_queue` to `vn`.`invoiceOutQueue`;

View File

@ -1,5 +0,0 @@
ALTER TABLE `vn`.`itemConfig`
ADD id int null PRIMARY KEY first;
ALTER TABLE `vn`.`itemConfig`
ADD wasteRecipients VARCHAR(50) NOT NULL comment 'Weekly waste report schedule recipients';

View File

@ -1,10 +0,0 @@
create table `salix`.`printConfig`
(
id int auto_increment,
itRecipient varchar(50) null comment 'IT recipients for report mailing',
incidencesEmail varchar(50) null comment 'CAU destinatary email',
constraint printConfig_pk
primary key (id)
)
comment 'Print service config';

View File

@ -1,6 +0,0 @@
alter table `vn`.`sample`
add model VARCHAR(25) null comment 'Model name in plural';
UPDATE vn.sample t
SET t.model = 'Clients'
WHERE t.id IN(12, 13, 14, 15, 16, 18, 19, 20);

View File

@ -1 +0,0 @@
ALTER TABLE `account`.`user` ADD hasGrant TINYINT(1) NOT NULL;

View File

@ -1,2 +0,0 @@
INSERT INTO `salix`.`ACL` (model,property,accessType,permission,principalId)
VALUES ('WorkerDisableExcluded','*','*','ALLOW','hr');

View File

@ -1,3 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Business', '*', '*', 'ALLOW', 'ROLE', 'hr');

View File

@ -1,3 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Sale', 'usesMana', '*', 'ALLOW', 'ROLE', 'employee');

View File

@ -1,2 +0,0 @@
INSERT INTO `vn`.`payDem` (id,payDem)
VALUES (7,'0');

View File

@ -1,4 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('InvoiceIn', 'invoiceInPdf', 'READ', 'ALLOW', 'ROLE', 'administrative'),
('InvoiceIn', 'invoiceInEmail', 'WRITE', 'ALLOW', 'ROLE', 'administrative');

View File

@ -1,2 +0,0 @@
INSERT INTO `salix`.`ACL` (model,property,accessType,principalId)
VALUES ('Supplier','newSupplier','WRITE','administrative');

View File

@ -1,63 +0,0 @@
USE util;
CREATE TABLE notification(
id INT PRIMARY KEY,
`name` VARCHAR(255) UNIQUE,
`description` VARCHAR(255)
);
CREATE TABLE notificationAcl(
notificationFk INT,
roleFk INT(10) unsigned,
PRIMARY KEY(notificationFk, roleFk)
);
ALTER TABLE `util`.`notificationAcl` ADD CONSTRAINT `notificationAcl_ibfk_1` FOREIGN KEY (`notificationFk`) REFERENCES `util`.`notification` (`id`)
ON DELETE CASCADE
ON UPDATE CASCADE;
ALTER TABLE `util`.`notificationAcl` ADD CONSTRAINT `notificationAcl_ibfk_2` FOREIGN KEY (`roleFk`) REFERENCES `account`.`role`(`id`)
ON DELETE RESTRICT
ON UPDATE CASCADE;
CREATE TABLE notificationSubscription(
notificationFk INT,
userFk INT(10) unsigned,
PRIMARY KEY(notificationFk, userFk)
);
ALTER TABLE `util`.`notificationSubscription` ADD CONSTRAINT `notificationSubscription_ibfk_1` FOREIGN KEY (`notificationFk`) REFERENCES `util`.`notification` (`id`)
ON DELETE CASCADE
ON UPDATE CASCADE;
ALTER TABLE `util`.`notificationSubscription` ADD CONSTRAINT `notificationSubscription_ibfk_2` FOREIGN KEY (`userFk`) REFERENCES `account`.`user`(`id`)
ON DELETE CASCADE
ON UPDATE CASCADE;
CREATE TABLE notificationQueue(
id INT PRIMARY KEY AUTO_INCREMENT,
notificationFk VARCHAR(255),
params JSON,
authorFk INT(10) unsigned NULL,
`status` ENUM('pending', 'sent', 'error') NOT NULL DEFAULT 'pending',
created DATETIME DEFAULT CURRENT_TIMESTAMP,
INDEX(notificationFk),
INDEX(authorFk),
INDEX(status)
);
ALTER TABLE `util`.`notificationQueue` ADD CONSTRAINT `nnotificationQueue_ibfk_1` FOREIGN KEY (`notificationFk`) REFERENCES `util`.`notification` (`name`)
ON DELETE CASCADE
ON UPDATE CASCADE;
ALTER TABLE `util`.`notificationQueue` ADD CONSTRAINT `notificationQueue_ibfk_2` FOREIGN KEY (`authorFk`) REFERENCES `account`.`user`(`id`)
ON DELETE CASCADE
ON UPDATE CASCADE;
CREATE TABLE notificationConfig(
id INT PRIMARY KEY AUTO_INCREMENT,
cleanDays MEDIUMINT
);
INSERT INTO notificationConfig
SET cleanDays = 90;

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`supplier` MODIFY COLUMN payMethodFk tinyint(3) unsigned NULL;

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`supplier` MODIFY COLUMN supplierActivityFk varchar(45) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci DEFAULT NULL NULL;

View File

@ -1,5 +0,0 @@
INSERT INTO `salix`.`ACL` (model, property, accessType, permission, principalType, principalId)
VALUES
('ClaimRma', '*', 'READ', 'ALLOW', 'ROLE', 'claimManager'),
('ClaimRma', '*', 'WRITE', 'ALLOW', 'ROLE', 'claimManager');

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`claim` ADD rma varchar(100) NULL ;

View File

@ -1,7 +0,0 @@
CREATE TABLE `vn`.`claimRma` (
id INT UNSIGNED auto_increment NOT NULL PRIMARY KEY,
code varchar(100) NOT NULL,
created timestamp DEFAULT current_timestamp() NOT NULL,
workerFk INTEGER UNSIGNED NOT NULL
)
ENGINE=InnoDB;

View File

@ -1,3 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Notification', '*', 'WRITE', 'ALLOW', 'ROLE', 'developer');

View File

@ -1,12 +0,0 @@
CREATE TABLE `vn`.`packingSiteConfig` (
`id` INT(11) NOT NULL AUTO_INCREMENT,
`shinobiUrl` varchar(255) NOT NULL,
`shinobiToken` varchar(255) NOT NULL,
`shinobiGroupKey` varchar(255) NOT NULL,
`avgBoxingTime` INT(3) NULL,
PRIMARY KEY (`id`)
);
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Boxing', '*', '*', 'ALLOW', 'ROLE', 'employee');

View File

@ -1,56 +0,0 @@
ALTER TABLE `vn`.`packingSite` ADD monitorId varchar(255) NULL;
UPDATE `vn`.`packingSite`
SET monitorId = 'VbiUcajdaT'
WHERE code = 'h1';
UPDATE `vn`.`packingSite`
SET monitorId = 'qKMPn9aaVe'
WHERE code = 'h2';
UPDATE `vn`.`packingSite`
SET monitorId = '3CtdIAGPAv'
WHERE code = 'h3';
UPDATE `vn`.`packingSite`
SET monitorId = 'Xme2hiqz1f'
WHERE code = 'h4';
UPDATE `vn`.`packingSite`
SET monitorId = 'aulxefgfJU'
WHERE code = 'h5';
UPDATE `vn`.`packingSite`
SET monitorId = '6Ou0D1bhBw'
WHERE code = 'h6';
UPDATE `vn`.`packingSite`
SET monitorId = 'eVUvnE6pNw'
WHERE code = 'h7';
UPDATE `vn`.`packingSite`
SET monitorId = '0wsmSvqmrs'
WHERE code = 'h8';
UPDATE `vn`.`packingSite`
SET monitorId = 'r2l2RyyF4I'
WHERE code = 'h9';
UPDATE `vn`.`packingSite`
SET monitorId = 'EdjHLIiDVD'
WHERE code = 'h10';
UPDATE `vn`.`packingSite`
SET monitorId = 'czC45kmwqI'
WHERE code = 'h11';
UPDATE `vn`.`packingSite`
SET monitorId = 'PNsmxPaCwQ'
WHERE code = 'h12';
UPDATE `vn`.`packingSite`
SET monitorId = 'agVssO0FDC'
WHERE code = 'h13';
UPDATE `vn`.`packingSite`
SET monitorId = 'f2SPNENHPo'
WHERE code = 'h14';
UPDATE `vn`.`packingSite`
SET monitorId = '6UR7gUZxks'
WHERE code = 'h15';
UPDATE `vn`.`packingSite`
SET monitorId = 'bOB0f8WZ2V'
WHERE code = 'h16';
UPDATE `vn`.`packingSite`
SET monitorId = 'MIR1nXaL0n'
WHERE code = 'h17';
UPDATE `vn`.`packingSite`
SET monitorId = '0Oj9SgGTXR'
WHERE code = 'h18';

View File

@ -1,33 +0,0 @@
CREATE TABLE `salix`.`url` (
`appName` varchar(100) NOT NULL,
`environment` varchar(100) NOT NULL,
`url` varchar(255) NOT NULL,
PRIMARY KEY (`appName`,`environment`)
);
INSERT INTO `salix`.`url` (`appName`, `environment`, `url`)
VALUES
('salix', 'production', 'https://salix.verdnatura.es/#!/');
INSERT INTO `salix`.`url` (`appName`, `environment`, `url`)
VALUES
('salix', 'test', 'https://test-salix.verdnatura.es/#!/');
INSERT INTO `salix`.`url` (`appName`, `environment`, `url`)
VALUES
('salix', 'dev', 'http://localhost:5000/#!/');
INSERT INTO `salix`.`url` (`appName`, `environment`, `url`)
VALUES
('lilium', 'production', 'https://lilium.verdnatura.es/#/');
INSERT INTO `salix`.`url` (`appName`, `environment`, `url`)
VALUES
('lilium', 'test', 'https://test-lilium.verdnatura.es/#/');
INSERT INTO `salix`.`url` (`appName`, `environment`, `url`)
VALUES
('lilium', 'dev', 'http://localhost:8080/#/');
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Url', '*', 'READ', 'ALLOW', 'ROLE', 'employee');
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Url', '*', 'WRITE', 'ALLOW', 'ROLE', 'it');

View File

@ -1,2 +0,0 @@
DELETE FROM `salix`.`ACL`
WHERE model = 'UserPassword';

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`claimConfig` DROP COLUMN `pickupContact`;

View File

@ -1,4 +0,0 @@
INSERT INTO `salix`.`ACL` (model, property, accessType, permission, principalType, principalId)
VALUES
('ItemShelving', '*', 'READ', 'ALLOW', 'ROLE', 'employee'),
('ItemShelving', '*', 'WRITE', 'ALLOW', 'ROLE', 'production');

View File

@ -1,4 +0,0 @@
INSERT INTO `salix`.`ACL` (model, property, accessType, permission, principalType, principalId)
VALUES
('ItemShelvingPlacementSupplyStock', '*', 'READ', 'ALLOW', 'ROLE', 'employee');

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`workerTimeControlMail` CHANGE emailResponse reason text CHARACTER SET utf8mb3 COLLATE utf8mb3_unicode_ci DEFAULT NULL NULL;

View File

@ -1,54 +0,0 @@
DROP PROCEDURE IF EXISTS `vn`.`zone_getPostalCode`;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`zone_getPostalCode`(vSelf INT)
BEGIN
/**
* Returns the postal codes included in a zone
*/
DECLARE vGeoFk INT DEFAULT NULL;
DROP TEMPORARY TABLE IF EXISTS tmp.zoneNodes;
CREATE TEMPORARY TABLE tmp.zoneNodes (
geoFk INT,
name VARCHAR(100),
parentFk INT,
sons INT,
isChecked BOOL DEFAULT 0,
zoneFk INT,
PRIMARY KEY zoneNodesPk (zoneFk, geoFk),
INDEX(geoFk))
ENGINE = MEMORY;
CALL zone_getLeaves2(vSelf, NULL , NULL);
UPDATE tmp.zoneNodes zn
SET isChecked = 0
WHERE parentFk IS NULL;
myLoop: LOOP
SET vGeoFk = NULL;
SELECT geoFk INTO vGeoFk
FROM tmp.zoneNodes zn
WHERE NOT isChecked
LIMIT 1;
CALL zone_getLeaves2(vSelf, vGeoFk, NULL);
UPDATE tmp.zoneNodes
SET isChecked = TRUE
WHERE geoFk = vGeoFk;
IF vGeoFk IS NULL THEN
LEAVE myLoop;
END IF;
END LOOP;
DELETE FROM tmp.zoneNodes
WHERE sons > 0;
SELECT zn.geoFk, zn.name
FROM tmp.zoneNodes zn
JOIN zone z ON z.id = zn.zoneFk;
END$$
DELIMITER ;

View File

@ -1,2 +0,0 @@
INSERT INTO `salix`.`ACL` (model,property,accessType,permission,principalType,principalId)
VALUES ('NotificationQueue','*','*','ALLOW','ROLE','employee');

View File

@ -1,7 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('InvoiceOut', 'clientsToInvoice', 'WRITE', 'ALLOW', 'ROLE', 'invoicing');
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('InvoiceOut', 'invoiceClient', 'WRITE', 'ALLOW', 'ROLE', 'invoicing');

View File

@ -1 +0,0 @@
Alter table `vn`.`expedition` RENAME COLUMN itemFk TO itemFk__;

View File

@ -1,8 +0,0 @@
ALTER TABLE
`vn`.`client`
ADD
COLUMN `hasElectronicInvoice` TINYINT(1) NOT NULL DEFAULT 0 COMMENT 'Registro de facturas mediante FACe'
AFTER
`hasInvoiceSimplified`;
-- hasElectronicInvoice would be more accurate, but a hasInvoiceSimplified field already exists

View File

@ -1 +0,0 @@
DROP PROCEDURE IF EXISTS `vn`.`collection_missingTrash`;

View File

@ -1 +0,0 @@
DROP TABLE `vn`.`invoiceOutQueue`;

View File

@ -1,4 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Sale', 'editTracked', 'WRITE', 'ALLOW', 'ROLE', 'production'),
('Sale', 'editFloramondo', 'WRITE', 'ALLOW', 'ROLE', 'salesAssistant');

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`greuge` CHANGE `userFK` `userFk` int(10) unsigned DEFAULT NULL NULL;

View File

@ -1,4 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('Receipt', 'balanceCompensationEmail', 'WRITE', 'ALLOW', 'ROLE', 'employee'),
('Receipt', 'balanceCompensationPdf', 'READ', 'ALLOW', 'ROLE', 'employee');

View File

@ -1,8 +0,0 @@
ALTER TABLE `vn`.`osTicketConfig` DROP COLUMN `action`;
ALTER TABLE `vn`.`osTicketConfig` ADD responseType varchar(100) NULL;
ALTER TABLE `vn`.`osTicketConfig` ADD fromEmailId INT NULL;
ALTER TABLE `vn`.`osTicketConfig` ADD replyTo varchar(100) NULL;
UPDATE `vn`.`osTicketConfig`
SET responseType='reply', fromEmailId=5, replyTo='all'
WHERE id=0;

View File

@ -1,14 +0,0 @@
DROP PROCEDURE IF EXISTS `vn`.`ticket_canMerge`;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`ticket_canMerge`(vDated DATE, vScopeDays INT, vLitersMax INT, vLinesMax INT, vWarehouseFk INT)
BEGIN
CALL vn.ticket_canbePostponed(vDated,TIMESTAMPADD(DAY, vScopeDays, vDated),vLitersMax,vLinesMax,vWarehouseFk);
END $$
DELIMITER ;
INSERT INTO `salix`.`ACL` (model, property, accessType, permission, principalType, principalId)
VALUES
('Ticket', 'getTicketsFuture', 'READ', 'ALLOW', 'ROLE', 'employee'),
('Ticket', 'merge', 'WRITE', 'ALLOW', 'ROLE', 'employee');

View File

@ -1,79 +0,0 @@
DROP PROCEDURE IF EXISTS `vn`.`ticket_canbePostponed`;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`ticket_canbePostponed`(vOriginDated DATE, vFutureDated DATE, vLitersMax INT, vLinesMax INT, vWarehouseFk INT)
BEGIN
/**
* Returns a list of tickets that could be merged with other tickets in the future
*
* @param vOriginDated Date in question
* @param vFutureDated Future date to probe
* @param vLitersMax Maximum volume of the tickets to be moved forward
* @param vLinesMax Maximum number of lines of the tickets to be moved forward
* @param vWarehouseFk vn.warehouse identifier
*/
DROP TEMPORARY TABLE IF EXISTS tmp.filter;
CREATE TEMPORARY TABLE tmp.filter
(INDEX (id))
SELECT sv.ticketFk id,
GROUP_CONCAT(DISTINCT i.itemPackingTypeFk ORDER BY i.itemPackingTypeFk) ipt,
CAST(sum(litros) AS DECIMAL(10,0)) liters,
CAST(count(*) AS DECIMAL(10,0)) `lines`,
st.name state,
sub2.id ticketFuture,
t.landed originETD,
sub2.landed destETD,
sub2.iptd tfIpt,
sub2.state tfState,
t.clientFk,
t.warehouseFk,
ts.alertLevel,
t.shipped,
sub2.shipped tfShipped,
t.workerFk,
st.code code,
sub2.code tfCode
FROM vn.saleVolume sv
JOIN vn.sale s ON s.id = sv.saleFk
JOIN vn.item i ON i.id = s.itemFk
JOIN vn.ticket t ON t.id = sv.ticketFk
JOIN vn.address a ON a.id = t.addressFk
JOIN vn.province p ON p.id = a.provinceFk
JOIN vn.country c ON c.id = p.countryFk
JOIN vn.ticketState ts ON ts.ticketFk = t.id
JOIN vn.state st ON st.id = ts.stateFk
JOIN vn.alertLevel al ON al.id = ts.alertLevel
LEFT JOIN vn.ticketParking tp ON tp.ticketFk = t.id
LEFT JOIN (
SELECT *
FROM (
SELECT
t.addressFk ,
t.id,
t.landed,
t.shipped,
st.name state,
st.code code,
GROUP_CONCAT(DISTINCT i.itemPackingTypeFk ORDER BY i.itemPackingTypeFk) iptd
FROM vn.ticket t
JOIN vn.ticketState ts ON ts.ticketFk = t.id
JOIN vn.state st ON st.id = ts.stateFk
JOIN vn.sale s ON s.ticketFk = t.id
JOIN vn.item i ON i.id = s.itemFk
WHERE t.shipped BETWEEN vFutureDated
AND util.dayend(vFutureDated)
AND t.warehouseFk = vWarehouseFk
GROUP BY t.id
) sub
GROUP BY sub.addressFk
) sub2 ON sub2.addressFk = t.addressFk AND t.id != sub2.id
WHERE t.shipped BETWEEN vOriginDated AND util.dayend(vOriginDated)
AND t.warehouseFk = vWarehouseFk
AND al.code = 'FREE'
AND tp.ticketFk IS NULL
GROUP BY sv.ticketFk
HAVING liters <= IFNULL(vLitersMax, 9999) AND `lines` <= IFNULL(vLinesMax, 9999) AND ticketFuture;
END$$
DELIMITER ;

View File

@ -1,88 +0,0 @@
DROP PROCEDURE IF EXISTS `vn`.`timeBusiness_calculate`;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`timeBusiness_calculate`(vDatedFrom DATETIME, vDatedTo DATETIME)
BEGIN
/**
* Hours an employee must work according to contract and day.
* @param vDatedFrom workerTimeControl
* @param vDatedTo workerTimeControl
* @table tmp.user(userFk)
* @return tmp.timeBusinessCalculate
*/
DROP TEMPORARY TABLE IF EXISTS tmp.timeBusinessCalculate;
CREATE TEMPORARY TABLE tmp.timeBusinessCalculate
(INDEX (departmentFk))
SELECT dated,
businessFk,
userFk,
departmentFk,
hourStart,
hourEnd,
timeTable,
timeWorkSeconds,
SEC_TO_TIME(timeWorkSeconds) timeWorkSexagesimal,
timeWorkSeconds / 3600 timeWorkDecimal,
timeWorkSeconds timeBusinessSeconds,
SEC_TO_TIME(timeWorkSeconds) timeBusinessSexagesimal,
timeWorkSeconds / 3600 timeBusinessDecimal,
name type,
permissionRate,
hoursWeek,
discountRate,
isAllowedToWork
FROM(SELECT t.dated,
b.id businessFk,
w.id,
b.departmentFk,
IF(j.start IS NULL, NULL, GROUP_CONCAT(DISTINCT LEFT(j.start,5) ORDER BY j.start ASC SEPARATOR ' - ')) hourStart,
IF(j.start IS NULL, NULL, GROUP_CONCAT(DISTINCT LEFT(j.end,5) ORDER BY j.end ASC SEPARATOR ' - ')) hourEnd,
IF(j.start IS NULL, NULL, GROUP_CONCAT(DISTINCT LEFT(j.start,5), " - ", LEFT(j.end,5) ORDER BY j.end ASC SEPARATOR ' - ')) timeTable,
IF(j.start IS NULL, 0, IFNULL(SUM(TIME_TO_SEC(j.end)) - SUM(TIME_TO_SEC(j.start)), 0)) timeWorkSeconds,
at2.name,
at2.permissionRate,
at2.discountRate,
cl.hours_week hoursWeek,
at2.isAllowedToWork
FROM time t
LEFT JOIN business b ON t.dated BETWEEN b.started AND IFNULL(b.ended, vDatedTo)
LEFT JOIN worker w ON w.id = b.workerFk
JOIN tmp.`user` u ON u.userFK = w.id
LEFT JOIN workCenter wc ON wc.id = b.workcenterFK
LEFT JOIN postgresql.calendar_labour_type cl ON cl.calendar_labour_type_id = b.calendarTypeFk
LEFT JOIN postgresql.journey j ON j.business_id = b.id AND j.day_id = WEEKDAY(t.dated) + 1
LEFT JOIN postgresql.calendar_employee ce ON ce.businessFk = b.id AND ce.date = t.dated
LEFT JOIN absenceType at2 ON at2.id = ce.calendar_state_id
WHERE t.dated BETWEEN vDatedFrom AND vDatedTo
GROUP BY w.id, t.dated
)sub;
UPDATE tmp.timeBusinessCalculate t
LEFT JOIN postgresql.journey j ON j.business_id = t.businessFk
SET t.timeWorkSeconds = t.hoursWeek / 5 * 3600,
t.timeWorkSexagesimal = SEC_TO_TIME( t.hoursWeek / 5 * 3600),
t.timeWorkDecimal = t.hoursWeek / 5,
t.timeBusinessSeconds = t.hoursWeek / 5 * 3600,
t.timeBusinessSexagesimal = SEC_TO_TIME( t.hoursWeek / 5 * 3600),
t.timeBusinessDecimal = t.hoursWeek / 5
WHERE DAYOFWEEK(t.dated) IN(2,3,4,5,6) AND j.journey_id IS NULL ;
UPDATE tmp.timeBusinessCalculate t
SET t.timeWorkSeconds = t.timeWorkSeconds - (t.timeWorkSeconds * permissionRate) ,
t.timeWorkSexagesimal = SEC_TO_TIME ((t.timeWorkDecimal - (t.timeWorkDecimal * permissionRate)) * 3600),
t.timeWorkDecimal = t.timeWorkDecimal - (t.timeWorkDecimal * permissionRate)
WHERE permissionRate <> 0;
UPDATE tmp.timeBusinessCalculate t
JOIN calendarHolidays ch ON ch.dated = t.dated
JOIN business b ON b.id = t.businessFk
AND b.workcenterFk = ch.workcenterFk
SET t.timeWorkSeconds = 0,
t.timeWorkSexagesimal = 0,
t.timeWorkDecimal = 0,
t.permissionRate = 1,
t.type = 'Festivo'
WHERE t.type IS NULL;
END$$
DELIMITER ;
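A minimal sketch of how the procedure above could be driven; the worker id and the date range are placeholder values, and tmp.user(userFk) must exist before the call as the doc comment states:

-- Hypothetical usage; the worker id and dates are placeholders
CREATE TEMPORARY TABLE tmp.`user` (userFk INT) ENGINE = MEMORY;
INSERT INTO tmp.`user` (userFk) VALUES (1106);
CALL vn.timeBusiness_calculate('2023-01-01', '2023-01-31');
SELECT * FROM tmp.timeBusinessCalculate;
DROP TEMPORARY TABLE tmp.`user`, tmp.timeBusinessCalculate;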

View File

@ -1,6 +0,0 @@
UPDATE
`vn`.`client`
SET
hasElectronicInvoice = TRUE
WHERE
businessTypeFk = 'officialOrganism';

View File

@ -1,4 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('MdbApp', 'lock', 'WRITE', 'ALLOW', 'ROLE', 'developer'),
('MdbApp', 'unlock', 'WRITE', 'ALLOW', 'ROLE', 'developer');

View File

@ -1,3 +0,0 @@
UPDATE `vn`.`collection`
SET sectorFk=1
WHERE id=1;

View File

@ -1 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`,`property`,`accessType`,`permission`,`principalId`) VALUES ('Dms','saveSign','*','ALLOW','employee');

View File

@ -1,3 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`, `property`, `accessType`, `permission`, `principalType`, `principalId`)
VALUES
('TicketLog', 'getChanges', 'READ', 'ALLOW', 'ROLE', 'employee');

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`entry` DROP COLUMN `ref`;

View File

@ -1,12 +0,0 @@
CREATE TABLE `vn`.`invoiceInConfig` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`retentionRate` int(3) NOT NULL,
`retentionName` varchar(25) NOT NULL,
`sageWithholdingFk` smallint(6) NOT NULL,
PRIMARY KEY (`id`),
CONSTRAINT `invoiceInConfig_sageWithholdingFk` FOREIGN KEY (`sageWithholdingFk`) REFERENCES `sage`.`TiposRetencion`(`CodigoRetencion`) ON DELETE CASCADE ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;
INSERT INTO `vn`.`invoiceInConfig` (`id`, `retentionRate`, `retentionName`, `sageWithholdingFk`)
VALUES
(1, -2, 'Retención 2%', 2);

View File

@ -1,225 +0,0 @@
DROP PROCEDURE IF EXISTS `vn`.`invoiceOut_new`;
DELIMITER $$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`invoiceOut_new`(
vSerial VARCHAR(255),
vInvoiceDate DATETIME,
vTaxArea VARCHAR(25),
OUT vNewInvoiceId INT)
BEGIN
/**
* Creates issued invoices.
* Requires the temporary table tmp.ticketToInvoice(id) to exist beforehand.
*
* @param vSerial Serial under which the invoice is issued
* @param vInvoiceDate Invoice date
* @param vTaxArea VAT type with regard to the company and the client
* @param vNewInvoiceId Id of the invoice that has just been generated
* @return vNewInvoiceId
* (an example calling sequence is sketched after this procedure)
*/
DECLARE vSpainCountryCode INT DEFAULT 1;
DECLARE vIsAnySaleToInvoice BOOL;
DECLARE vIsAnyServiceToInvoice BOOL;
DECLARE vNewRef VARCHAR(255);
DECLARE vWorker INT DEFAULT account.myUser_getId();
DECLARE vCompany INT;
DECLARE vSupplier INT;
DECLARE vClient INT;
DECLARE vCplusStandardInvoiceTypeFk INT DEFAULT 1;
DECLARE vCplusCorrectingInvoiceTypeFk INT DEFAULT 6;
DECLARE vCplusSimplifiedInvoiceTypeFk INT DEFAULT 2;
DECLARE vCorrectingSerial VARCHAR(1) DEFAULT 'R';
DECLARE vSimplifiedSerial VARCHAR(1) DEFAULT 'S';
DECLARE vNewInvoiceInId INT;
DECLARE vIsInterCompany BOOL;
SET vInvoiceDate = IFNULL(vInvoiceDate,CURDATE());
SELECT t.clientFk, t.companyFk
INTO vClient, vCompany
FROM tmp.ticketToInvoice tt
JOIN ticket t ON t.id = tt.id
LIMIT 1;
-- Remove from tmp.ticketToInvoice the tickets that must not be invoiced
DELETE ti.*
FROM tmp.ticketToInvoice ti
JOIN ticket t ON t.id = ti.id
JOIN sale s ON s.ticketFk = t.id
JOIN item i ON i.id = s.itemFk
JOIN supplier su ON su.id = t.companyFk
JOIN client c ON c.id = t.clientFk
LEFT JOIN itemTaxCountry itc ON itc.itemFk = i.id AND itc.countryFk = su.countryFk
WHERE YEAR(t.shipped) < 2001
OR c.isTaxDataChecked = FALSE
OR t.isDeleted
OR c.hasToInvoice = FALSE
OR itc.id IS NULL;
SELECT SUM(s.quantity * s.price * (100 - s.discount)/100), ts.id
INTO vIsAnySaleToInvoice, vIsAnyServiceToInvoice
FROM tmp.ticketToInvoice t
LEFT JOIN sale s ON s.ticketFk = t.id
LEFT JOIN ticketService ts ON ts.ticketFk = t.id;
IF (vIsAnySaleToInvoice OR vIsAnyServiceToInvoice)
AND (vCorrectingSerial = vSerial OR NOT hasAnyNegativeBase())
THEN
-- The trigger assigns the next Id_Factura for the given vSerial
INSERT INTO invoiceOut
(
ref,
serial,
issued,
clientFk,
dued,
companyFk,
siiTypeInvoiceOutFk
)
SELECT
1,
vSerial,
vInvoiceDate,
vClient,
getDueDate(vInvoiceDate, dueDay),
vCompany,
IF(vSerial = vCorrectingSerial,
vCplusCorrectingInvoiceTypeFk,
IF(vSerial = vSimplifiedSerial,
vCplusSimplifiedInvoiceTypeFk,
vCplusStandardInvoiceTypeFk))
FROM client
WHERE id = vClient;
SET vNewInvoiceId = LAST_INSERT_ID();
SELECT `ref`
INTO vNewRef
FROM invoiceOut
WHERE id = vNewInvoiceId;
UPDATE ticket t
JOIN tmp.ticketToInvoice ti ON ti.id = t.id
SET t.refFk = vNewRef;
DROP TEMPORARY TABLE IF EXISTS tmp.updateInter;
CREATE TEMPORARY TABLE tmp.updateInter ENGINE = MEMORY
SELECT s.id,ti.id ticket_id,vWorker Id_Trabajador
FROM tmp.ticketToInvoice ti
LEFT JOIN ticketState ts ON ti.id = ts.ticket
JOIN state s
WHERE IFNULL(ts.alertLevel,0) < 3 and s.`code` = getAlert3State(ti.id);
INSERT INTO vncontrol.inter(state_id,Id_Ticket,Id_Trabajador)
SELECT * FROM tmp.updateInter;
INSERT INTO ticketLog (action, userFk, originFk, description)
SELECT 'UPDATE', account.myUser_getId(), ti.id, CONCAT('Crea factura ', vNewRef)
FROM tmp.ticketToInvoice ti;
CALL invoiceExpenseMake(vNewInvoiceId);
CALL invoiceTaxMake(vNewInvoiceId,vTaxArea);
UPDATE invoiceOut io
JOIN (
SELECT SUM(amount) AS total
FROM invoiceOutExpense
WHERE invoiceOutFk = vNewInvoiceId
) base
JOIN (
SELECT SUM(vat) AS total
FROM invoiceOutTax
WHERE invoiceOutFk = vNewInvoiceId
) vat
SET io.amount = base.total + vat.total
WHERE io.id = vNewInvoiceId;
DROP TEMPORARY TABLE tmp.updateInter;
SELECT ios.isCEE INTO vIsInterCompany
FROM vn.ticket t
JOIN vn.invoiceOut io ON io.`ref` = t.refFk
JOIN vn.invoiceOutSerial ios ON ios.code = io.serial
WHERE t.refFk = vNewRef
LIMIT 1;
IF (vIsInterCompany) THEN
SELECT vCompany INTO vSupplier;
SELECT id INTO vCompany FROM company WHERE clientFk = vClient;
INSERT INTO invoiceIn(supplierFk, supplierRef, issued, companyFk)
SELECT vSupplier, vNewRef, vInvoiceDate, vCompany;
SET vNewInvoiceInId = LAST_INSERT_ID();
DROP TEMPORARY TABLE IF EXISTS tmp.ticket;
CREATE TEMPORARY TABLE tmp.ticket
(KEY (ticketFk))
ENGINE = MEMORY
SELECT id ticketFk
FROM tmp.ticketToInvoice;
CALL `ticket_getTax`('NATIONAL');
SET @vTaxableBaseServices := 0.00;
SET @vTaxCodeGeneral := NULL;
INSERT INTO vn.invoiceInTax(invoiceInFk, taxableBase, expenseFk, taxTypeSageFk, transactionTypeSageFk)
SELECT vNewInvoiceInId, @vTaxableBaseServices, sub.expenseFk, sub.taxTypeSageFk , sub.transactionTypeSageFk
FROM (
SELECT @vTaxableBaseServices := SUM(tst.taxableBase) taxableBase, i.expenseFk, i.taxTypeSageFk , i.transactionTypeSageFk, @vTaxCodeGeneral := i.taxClassCodeFk
FROM tmp.ticketServiceTax tst
JOIN vn.invoiceOutTaxConfig i ON i.taxClassCodeFk = tst.code
WHERE i.isService
HAVING taxableBase
) sub;
INSERT INTO vn.invoiceInTax(invoiceInFk, taxableBase, expenseFk, taxTypeSageFk, transactionTypeSageFk)
SELECT vNewInvoiceInId, SUM(tt.taxableBase) - IF(tt.code = @vTaxCodeGeneral, @vTaxableBaseServices, 0) taxableBase, i.expenseFk, i.taxTypeSageFk , i.transactionTypeSageFk
FROM tmp.ticketTax tt
JOIN vn.invoiceOutTaxConfig i ON i.taxClassCodeFk = tt.code
WHERE !i.isService
GROUP BY tt.pgcFk
HAVING taxableBase
ORDER BY tt.priority;
CALL invoiceInDueDay_calculate(vNewInvoiceInId);
INSERT INTO invoiceInIntrastat (
invoiceInFk,
intrastatFk,
amount,
stems,
countryFk,
net)
SELECT
vNewInvoiceInId invoiceInFk,
i.intrastatFk,
CAST(SUM((s.quantity * s.price * (100 - s.discount) / 100 )) AS DECIMAL(10,2)) subtotal,
CAST(SUM(IFNULL(i.stems, 1) * s.quantity) AS DECIMAL(10,2)) stems,
su.countryFk,
CAST(SUM(IFNULL(i.stems, 1)
* s.quantity
* IF(ic.grams, ic.grams, i.weightByPiece) / 1000) AS DECIMAL(10,2)) netKg
FROM sale s
JOIN ticket t ON s.ticketFk = t.id
JOIN supplier su ON su.id = t.companyFk
JOIN item i ON i.id = s.itemFk
JOIN vn.itemCost ic ON ic.itemFk = i.id AND ic.warehouseFk = t.warehouseFk
JOIN intrastat ir ON ir.id = i.intrastatFk
WHERE t.refFk = vNewRef;
DROP TEMPORARY TABLE tmp.ticket;
DROP TEMPORARY TABLE tmp.ticketAmount;
DROP TEMPORARY TABLE tmp.ticketTax;
DROP TEMPORARY TABLE tmp.ticketServiceTax;
END IF;
END IF;
DROP TEMPORARY TABLE `tmp`.`ticketToInvoice`;
END$$
DELIMITER ;
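A minimal sketch of the expected calling sequence for the procedure above; the serial, tax area and ticket id are placeholder values, and the OUT parameter is read back through a user variable:

-- Hypothetical usage; the serial, tax area and ticket id are placeholders
CREATE TEMPORARY TABLE tmp.ticketToInvoice (id INT) ENGINE = MEMORY;
INSERT INTO tmp.ticketToInvoice (id) VALUES (1000000);
CALL vn.invoiceOut_new('A', CURDATE(), 'NATIONAL', @newInvoiceId);
SELECT @newInvoiceId;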

View File

@ -1,11 +0,0 @@
CREATE TABLE `vn`.`mdbApp` (
`app` varchar(100) COLLATE utf8mb3_unicode_ci NOT NULL,
`baselineBranchFk` varchar(255) COLLATE utf8mb3_unicode_ci DEFAULT NULL,
`userFk` int(10) unsigned DEFAULT NULL,
`locked` datetime DEFAULT NULL,
PRIMARY KEY (`app`),
KEY `mdbApp_FK` (`userFk`),
KEY `mdbApp_FK_1` (`baselineBranchFk`),
CONSTRAINT `mdbApp_FK` FOREIGN KEY (`userFk`) REFERENCES `account`.`user` (`id`) ON DELETE SET NULL ON UPDATE CASCADE,
CONSTRAINT `mdbApp_FK_1` FOREIGN KEY (`baselineBranchFk`) REFERENCES `mdbBranch` (`name`) ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;

View File

@ -1,24 +0,0 @@
DROP FUNCTION IF EXISTS `util`.`notification_send`;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` FUNCTION `util`.`notification_send`(vNotificationName VARCHAR(255), vParams TEXT, vAuthorFk INT) RETURNS int(11)
MODIFIES SQL DATA
BEGIN
/**
* Sends a notification.
*
* @param vNotificationName The notification name
* @param vParams The notification parameters formatted as JSON
* @param vAuthorFk The notification author or %NULL if there is no author
* @return The notification id
*/
INSERT INTO notificationQueue
SET notificationFk = vNotificationName,
params = vParams,
authorFk = vAuthorFk;
RETURN LAST_INSERT_ID();
END$$
DELIMITER ;
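A minimal sketch of how the function above might be used; the notification name matches the one inserted later in this migration set, while the JSON payload and the NULL author are placeholders:

-- Hypothetical usage; the params payload is a placeholder
SELECT util.notification_send('book-entries-imported-incorrectly', '{"bookEntryFk": 123}', NULL) AS notificationId;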

View File

@ -1,8 +0,0 @@
CREATE TABLE `vn`.`ticketSms` (
`smsFk` mediumint(8) unsigned NOT NULL,
`ticketFk` int(11) DEFAULT NULL,
PRIMARY KEY (`smsFk`),
KEY `ticketSms_FK_1` (`ticketFk`),
CONSTRAINT `ticketSms_FK` FOREIGN KEY (`smsFk`) REFERENCES `sms` (`id`) ON UPDATE CASCADE,
CONSTRAINT `ticketSms_FK_1` FOREIGN KEY (`ticketFk`) REFERENCES `ticket` (`id`) ON UPDATE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;

View File

@ -1,104 +0,0 @@
DROP PROCEDURE IF EXISTS `vn`.`ticket_canAdvance`;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`ticket_canAdvance`(vDateFuture DATE, vDateToAdvance DATE, vWarehouseFk INT)
BEGIN
/**
* Returns the tickets and the number of sale lines that can be moved forward.
*
* @param vDateFuture Date of the tickets to be moved forward.
* @param vDateToAdvance Target date to move them forward to.
* @param vWarehouseFk Warehouse
* (an example call is sketched at the end of this file)
*/
DECLARE vDateInventory DATE;
SELECT inventoried INTO vDateInventory FROM vn.config;
DROP TEMPORARY TABLE IF EXISTS tmp.stock;
CREATE TEMPORARY TABLE tmp.stock
(itemFk INT PRIMARY KEY,
amount INT)
ENGINE = MEMORY;
INSERT INTO tmp.stock(itemFk, amount)
SELECT itemFk, SUM(quantity) amount FROM
(
SELECT itemFk, quantity
FROM vn.itemTicketOut
WHERE shipped >= vDateInventory
AND shipped < vDateFuture
AND warehouseFk = vWarehouseFk
UNION ALL
SELECT itemFk, quantity
FROM vn.itemEntryIn
WHERE landed >= vDateInventory
AND landed < vDateFuture
AND isVirtualStock = FALSE
AND warehouseInFk = vWarehouseFk
UNION ALL
SELECT itemFk, quantity
FROM vn.itemEntryOut
WHERE shipped >= vDateInventory
AND shipped < vDateFuture
AND warehouseOutFk = vWarehouseFk
) t
GROUP BY itemFk HAVING amount != 0;
DROP TEMPORARY TABLE IF EXISTS tmp.filter;
CREATE TEMPORARY TABLE tmp.filter
(INDEX (id))
SELECT s.ticketFk futureId,
t2.ticketFk id,
sum((s.quantity <= IFNULL(st.amount,0))) hasStock,
count(DISTINCT s.id) saleCount,
t2.state,
t2.stateCode,
st.name futureState,
st.code futureStateCode,
GROUP_CONCAT(DISTINCT ipt.code ORDER BY ipt.code) futureIpt,
t2.ipt,
t.workerFk,
CAST(sum(litros) AS DECIMAL(10,0)) liters,
CAST(count(*) AS DECIMAL(10,0)) `lines`,
t2.shipped,
t.shipped futureShipped,
t2.totalWithVat,
t.totalWithVat futureTotalWithVat
FROM vn.ticket t
JOIN vn.ticketState ts ON ts.ticketFk = t.id
JOIN vn.state st ON st.id = ts.stateFk
JOIN vn.saleVolume sv ON t.id = sv.ticketFk
JOIN (SELECT
t2.id ticketFk,
t2.addressFk,
st.name state,
st.code stateCode,
GROUP_CONCAT(DISTINCT ipt.code ORDER BY ipt.code) ipt,
t2.shipped,
t2.totalWithVat
FROM vn.ticket t2
JOIN vn.sale s ON s.ticketFk = t2.id
JOIN vn.item i ON i.id = s.itemFk
JOIN vn.ticketState ts ON ts.ticketFk = t2.id
JOIN vn.state st ON st.id = ts.stateFk
LEFT JOIN vn.itemPackingType ipt ON ipt.code = i.itemPackingTypeFk
WHERE t2.shipped BETWEEN vDateToAdvance AND util.dayend(vDateToAdvance)
AND t2.warehouseFk = vWarehouseFk
GROUP BY t2.id) t2 ON t2.addressFk = t.addressFk
JOIN vn.sale s ON s.ticketFk = t.id
JOIN vn.item i ON i.id = s.itemFk
LEFT JOIN vn.itemPackingType ipt ON ipt.code = i.itemPackingTypeFk
LEFT JOIN tmp.stock st ON st.itemFk = s.itemFk
WHERE t.shipped BETWEEN vDateFuture AND util.dayend(vDateFuture)
AND t.warehouseFk = vWarehouseFk
GROUP BY t.id;
DROP TEMPORARY TABLE tmp.stock;
END$$
DELIMITER ;
INSERT INTO `salix`.`ACL` (model, property, accessType, permission, principalType, principalId)
VALUES
('Ticket', 'getTicketsAdvance', 'READ', 'ALLOW', 'ROLE', 'employee');
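A minimal sketch of a call to the procedure above; the two dates and the warehouse id are placeholders, and the results are left in tmp.filter:

-- Hypothetical usage; every argument value is a placeholder
CALL vn.ticket_canAdvance('2023-01-17', '2023-01-16', 60);
SELECT * FROM tmp.filter;
DROP TEMPORARY TABLE tmp.filter;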

View File

@ -1,73 +0,0 @@
DROP PROCEDURE IF EXISTS `vn`.`ticket_canbePostponed`;
DELIMITER $$
$$
CREATE DEFINER=`root`@`localhost` PROCEDURE `vn`.`ticket_canbePostponed`(vOriginDated DATE, vFutureDated DATE, vWarehouseFk INT)
BEGIN
/**
* Returns the list of tickets that could be merged with other tickets on a future date.
*
* @param vOriginDated Date under review
* @param vFutureDated Future date to probe
* @param vWarehouseFk vn.warehouse identifier
* (an example call is sketched after this procedure)
*/
DROP TEMPORARY TABLE IF EXISTS tmp.filter;
CREATE TEMPORARY TABLE tmp.filter
(INDEX (id))
SELECT sv.ticketFk id,
sub2.id futureId,
GROUP_CONCAT(DISTINCT i.itemPackingTypeFk ORDER BY i.itemPackingTypeFk) ipt,
CAST(sum(litros) AS DECIMAL(10,0)) liters,
CAST(count(*) AS DECIMAL(10,0)) `lines`,
st.name state,
sub2.iptd futureIpt,
sub2.state futureState,
t.clientFk,
t.warehouseFk,
ts.alertLevel,
t.shipped,
sub2.shipped futureShipped,
t.workerFk,
st.code stateCode,
sub2.code futureStateCode
FROM vn.saleVolume sv
JOIN vn.sale s ON s.id = sv.saleFk
JOIN vn.item i ON i.id = s.itemFk
JOIN vn.ticket t ON t.id = sv.ticketFk
JOIN vn.address a ON a.id = t.addressFk
JOIN vn.province p ON p.id = a.provinceFk
JOIN vn.country c ON c.id = p.countryFk
JOIN vn.ticketState ts ON ts.ticketFk = t.id
JOIN vn.state st ON st.id = ts.stateFk
JOIN vn.alertLevel al ON al.id = ts.alertLevel
LEFT JOIN vn.ticketParking tp ON tp.ticketFk = t.id
LEFT JOIN (
SELECT *
FROM (
SELECT
t.addressFk,
t.id,
t.shipped,
st.name state,
st.code code,
GROUP_CONCAT(DISTINCT i.itemPackingTypeFk ORDER BY i.itemPackingTypeFk) iptd
FROM vn.ticket t
JOIN vn.ticketState ts ON ts.ticketFk = t.id
JOIN vn.state st ON st.id = ts.stateFk
JOIN vn.sale s ON s.ticketFk = t.id
JOIN vn.item i ON i.id = s.itemFk
WHERE t.shipped BETWEEN vFutureDated
AND util.dayend(vFutureDated)
AND t.warehouseFk = vWarehouseFk
GROUP BY t.id
) sub
GROUP BY sub.addressFk
) sub2 ON sub2.addressFk = t.addressFk AND t.id != sub2.id
WHERE t.shipped BETWEEN vOriginDated AND util.dayend(vOriginDated)
AND t.warehouseFk = vWarehouseFk
AND al.code = 'FREE'
AND tp.ticketFk IS NULL
GROUP BY sv.ticketFk
HAVING futureId;
END$$
DELIMITER ;
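The earlier three-parameter variant above follows the same calling pattern; the dates and the warehouse id are again placeholders:

-- Hypothetical usage; every argument value is a placeholder
CALL vn.ticket_canbePostponed('2023-01-16', '2023-01-17', 60);
SELECT * FROM tmp.filter;
DROP TEMPORARY TABLE tmp.filter;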

View File

@ -1,2 +0,0 @@
DROP PROCEDURE IF EXISTS `ticket_split`;
DROP PROCEDURE IF EXISTS `ticket_merge`;

View File

@ -1,4 +0,0 @@
INSERT INTO `util`.`notification` (id, name, description) VALUES(3, 'book-entries-imported-incorrectly', 'accounting entries exported incorrectly');
INSERT INTO `util`.`notificationAcl` (notificationFk, roleFk) VALUES(3, 5);
INSERT IGNORE INTO `util`.`notificationSubscription` (notificationFk, userFk) VALUES(3, 19663);

View File

@ -1,7 +0,0 @@
CREATE TABLE `vn`.`stateI18n` (
`stateFk` tinyint(3) unsigned NOT NULL,
`lang` char(2) NOT NULL,
`name` varchar(255) NOT NULL,
PRIMARY KEY (`stateFk`, `lang`),
CONSTRAINT `stateI18n_state_id` FOREIGN KEY (`stateFk`) REFERENCES `vn`.`state` (`id`)
) ENGINE = InnoDB DEFAULT CHARSET = utf8;

View File

@ -1,60 +0,0 @@
UPDATE salix.module t
SET t.code = 'supplier'
WHERE t.code LIKE 'Suppliers' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'travel'
WHERE t.code LIKE 'Travels' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'ticket'
WHERE t.code LIKE 'Tickets' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'zone'
WHERE t.code LIKE 'Zones' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'monitor'
WHERE t.code LIKE 'Monitors' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'entry'
WHERE t.code LIKE 'Entries' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'invoiceIn'
WHERE t.code LIKE 'Invoices in' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'customer'
WHERE t.code LIKE 'Clients' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'route'
WHERE t.code LIKE 'Routes' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'item'
WHERE t.code LIKE 'Items' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'claim'
WHERE t.code LIKE 'Claims' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'account'
WHERE t.code LIKE 'Users' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'invoiceOut'
WHERE t.code LIKE 'Invoices out' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'order'
WHERE t.code LIKE 'Orders' ESCAPE '#';
UPDATE salix.module t
SET t.code = 'worker'
WHERE t.code LIKE 'Workers' ESCAPE '#';

View File

@ -1,73 +0,0 @@
INSERT INTO
`vn`.`stateI18n` (`stateFk`, `lang`, `name`)
VALUES
(1, 'en', 'Fix'),
(1, 'es', 'Arreglar'),
(2, 'en', 'Free'),
(2, 'es', 'Libre'),
(3, 'en', 'OK'),
(3, 'es', 'OK'),
(4, 'en', 'Printed'),
(4, 'es', 'Impreso'),
(5, 'en', 'Preparation'),
(5, 'es', 'Preparación'),
(6, 'en', 'In Review'),
(6, 'es', 'En Revisión'),
(7, 'en', 'Unfinished'),
(7, 'es', 'Sin Acabar'),
(8, 'en', 'Reviewed'),
(8, 'es', 'Revisado'),
(9, 'en', 'Fitting'),
(9, 'es', 'Encajando'),
(10, 'en', 'Fitted'),
(10, 'es', 'Encajado'),
(11, 'en', 'Billed'),
(11, 'es', 'Facturado'),
(12, 'en', 'Blocked'),
(12, 'es', 'Bloqueado'),
(13, 'en', 'In Delivery'),
(13, 'es', 'En Reparto'),
(14, 'en', 'Prepared'),
(14, 'es', 'Preparado'),
(15, 'en', 'Pending Collection'),
(15, 'es', 'Pendiente de Recogida'),
(16, 'en', 'Delivered'),
(16, 'es', 'Entregado'),
(20, 'en', 'Assigned'),
(20, 'es', 'Asignado'),
(21, 'en', 'Returned'),
(21, 'es', 'Retornado'),
(22, 'en', 'Pending to extend'),
(22, 'es', 'Pendiente ampliar'),
(23, 'en', 'URGENT'),
(23, 'es', 'URGENTE'),
(24, 'en', 'Chained'),
(24, 'es', 'Encadenado'),
(25, 'en', 'Shipping'),
(25, 'es', 'Embarcando'),
(26, 'en', 'Preparation'),
(26, 'es', 'Preparación previa'),
(27, 'en', 'Assisted preparation'),
(27, 'es', 'Preparación asistida'),
(28, 'en', 'Preparation OK'),
(28, 'es', 'Previa OK'),
(29, 'en', 'Preparation Printed'),
(29, 'es', 'Previa Impreso'),
(30, 'en', 'Shipped'),
(30, 'es', 'Embarcado'),
(31, 'en', 'Stowaway printed'),
(31, 'es', 'Polizón Impreso'),
(32, 'en', 'Stowaway OK'),
(32, 'es', 'Polizón OK'),
(33, 'en', 'Auto_Printed'),
(33, 'es', 'Auto_Impreso'),
(34, 'en', 'Pending payment'),
(34, 'es', 'Pendiente de pago'),
(35, 'en', 'Half-Embedded'),
(35, 'es', 'Semi-Encajado'),
(36, 'en', 'Preparation Reviewing'),
(36, 'es', 'Previa Revisando'),
(37, 'en', 'Preparation Reviewed'),
(37, 'es', 'Previa Revisado'),
(38, 'en', 'Preparation Chamber'),
(38, 'es', 'Preparación Cámara');

View File

@ -1,16 +0,0 @@
UPDATE `vn`.starredModule SET moduleFk = 'customer' WHERE moduleFk = 'Clients';
UPDATE `vn`.starredModule SET moduleFk = 'ticket' WHERE moduleFk = 'Tickets';
UPDATE `vn`.starredModule SET moduleFk = 'route' WHERE moduleFk = 'Routes';
UPDATE `vn`.starredModule SET moduleFk = 'zone' WHERE moduleFk = 'Zones';
UPDATE `vn`.starredModule SET moduleFk = 'order' WHERE moduleFk = 'Orders';
UPDATE `vn`.starredModule SET moduleFk = 'claim' WHERE moduleFk = 'Claims';
UPDATE `vn`.starredModule SET moduleFk = 'item' WHERE moduleFk = 'Items';
UPDATE `vn`.starredModule SET moduleFk = 'worker' WHERE moduleFk = 'Workers';
UPDATE `vn`.starredModule SET moduleFk = 'entry' WHERE moduleFk = 'Entries';
UPDATE `vn`.starredModule SET moduleFk = 'invoiceOut' WHERE moduleFk = 'Invoices out';
UPDATE `vn`.starredModule SET moduleFk = 'invoiceIn' WHERE moduleFk = 'Invoices in';
UPDATE `vn`.starredModule SET moduleFk = 'monitor' WHERE moduleFk = 'Monitors';
UPDATE `vn`.starredModule SET moduleFk = 'user' WHERE moduleFk = 'Users';
UPDATE `vn`.starredModule SET moduleFk = 'supplier' WHERE moduleFk = 'Suppliers';
UPDATE `vn`.starredModule SET moduleFk = 'travel' WHERE moduleFk = 'Travels';
UPDATE `vn`.starredModule SET moduleFk = 'shelving' WHERE moduleFk = 'Shelvings';

View File

@ -1,28 +0,0 @@
ALTER TABLE `vn`.`mdbApp` DROP PRIMARY KEY;
ALTER TABLE `vn`.`mdbApp` ADD CONSTRAINT mdbApp_PK PRIMARY KEY (app,baselineBranchFk);
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('com','master');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('enc','master');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('ent','master');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('eti','master');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('lab','master');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('tpv','master');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('com','dev');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('enc','dev');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('ent','dev');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('eti','dev');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('lab','dev');
INSERT INTO `vn`.`mdbApp` (app,baselineBranchFk)
VALUES ('tpv','dev');

View File

@ -1,5 +0,0 @@
UPDATE `vn`.`osTicketConfig`
SET oldStatus='1,6'
WHERE id=0;

View File

@ -1,2 +0,0 @@
INSERT INTO `salix`.`ACL` (`model`,`property`,`accessType`,`permission`,`principalId`)
VALUES ('ItemShelvingSale','*','*','ALLOW','employee');

View File

@ -1 +0,0 @@
ALTER TABLE `vn`.`supplier` ADD UNIQUE (name, countryFk);

View File

@ -1,4 +0,0 @@
SET FOREIGN_KEY_CHECKS = 0;
ALTER TABLE `vn`.`report` MODIFY COLUMN id tinyint(3) unsigned NOT NULL AUTO_INCREMENT;
ALTER TABLE `vn`.`printer` MODIFY COLUMN id tinyint(3) unsigned NOT NULL AUTO_INCREMENT;
SET FOREIGN_KEY_CHECKS = 1;

View File

@ -1,3 +0,0 @@
ALTER TABLE `vn`.`itemPackingType` ADD isActive BOOLEAN NOT NULL;
UPDATE `vn`.`itemPackingType` SET isActive = 0 WHERE code IN ('P', 'F');
UPDATE `vn`.`itemPackingType` SET isActive = 1 WHERE code IN ('V', 'H');

View File

@ -1,23 +0,0 @@
CREATE OR REPLACE TABLE `vn`.`docuware` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`code` varchar(50) COLLATE utf8mb3_unicode_ci NOT NULL,
`fileCabinetName` varchar(50) COLLATE utf8mb3_unicode_ci NOT NULL,
`action` varchar(255) COLLATE utf8mb3_unicode_ci NOT NULL,
`dialogName` varchar(100) COLLATE utf8mb3_unicode_ci NOT NULL,
`findById` varchar(50) COLLATE utf8mb3_unicode_ci DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3 COLLATE=utf8mb3_unicode_ci;
INSERT INTO `vn`.`docuware` (`code`, `fileCabinetName`, `action`, `dialogName`, `findById`)
VALUES
('deliveryNote', 'Albaranes cliente', 'find', 'find', 'N__ALBAR_N'),
('deliveryNote', 'Albaranes cliente', 'store', 'Archivar', 'N__ALBAR_N');
INSERT INTO `salix`.`ACL` (`model`,`property`,`accessType`,`permission`,`principalId`)
VALUES
('Docuware','checkFile','READ','ALLOW','employee'),
('Docuware','download','READ','ALLOW','salesPerson'),
('Docuware','upload','WRITE','ALLOW','productionAssi'),
('Docuware','deliveryNoteEmail','WRITE','ALLOW','salesPerson');
ALTER TABLE `vn`.`docuwareConfig` CHANGE token cookie varchar(1000) CHARACTER SET utf8mb3 COLLATE utf8mb3_unicode_ci DEFAULT NULL NULL;

View File

@ -1,5 +0,0 @@
ALTER TABLE `vn`.`itemConfig` ADD defaultTag INT DEFAULT 56 NOT NULL;
ALTER TABLE `vn`.`itemConfig` ADD CONSTRAINT itemConfig_FK FOREIGN KEY (defaultTag) REFERENCES vn.tag(id);
ALTER TABLE `vn`.`itemConfig` ADD validPriorities varchar(50) DEFAULT '[1,2,3]' NOT NULL;
ALTER TABLE `vn`.`itemConfig` ADD defaultPriority INT DEFAULT 2 NOT NULL;
ALTER TABLE `vn`.`item` MODIFY COLUMN relevancy tinyint(1) DEFAULT 0 NOT NULL COMMENT 'La web ordena de forma descendiente por este campo para mostrar los artículos';

Some files were not shown because too many files have changed in this diff.