Reformat code

commit e54aa56cee (parent 59676592e0)

docs.json (12 lines changed)
|
@@ -1,8 +1,14 @@
|
|||
{
|
||||
"content": [
|
||||
{"title": "LoopBack MySQL Connector API", "depth": 2},
|
||||
"content": [
|
||||
{
|
||||
"title": "LoopBack MySQL Connector API",
|
||||
"depth": 2
|
||||
},
|
||||
"lib/mysql.js",
|
||||
{"title": "MySQL Discovery API", "depth": 2},
|
||||
{
|
||||
"title": "MySQL Discovery API",
|
||||
"depth": 2
|
||||
},
|
||||
"lib/discovery.js"
|
||||
],
|
||||
"codeSectionDepth": 3
|
||||
|
|
|
@@ -5,19 +5,18 @@ var config = require('rc')('loopback', {dev: {mysql: {}}}).dev.mysql;
|
|||
var ds = new DataSource(require('../'), config);
|
||||
|
||||
function show(err, models) {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
} else {
|
||||
console.log(models);
|
||||
if (models) {
|
||||
models.forEach(function (m) {
|
||||
console.dir(m);
|
||||
});
|
||||
}
|
||||
if (err) {
|
||||
console.error(err);
|
||||
} else {
|
||||
console.log(models);
|
||||
if (models) {
|
||||
models.forEach(function (m) {
|
||||
console.dir(m);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
ds.discoverModelDefinitions({views: true, limit: 20}, show);
|
||||
|
||||
ds.discoverModelProperties('customer', show);
|
||||
|
@@ -25,16 +24,15 @@ ds.discoverModelProperties('customer', show);
|
|||
ds.discoverModelProperties('location', {owner: 'strongloop'}, show);
|
||||
|
||||
ds.discoverPrimaryKeys('customer', show);
|
||||
ds.discoverForeignKeys('inventory', show);
|
||||
|
||||
ds.discoverExportedForeignKeys('location', show);
|
||||
ds.discoverForeignKeys('inventory', show);
|
||||
|
||||
ds.discoverExportedForeignKeys('location', show);
|
||||
|
||||
ds.discoverAndBuildModels('weapon', {owner: 'strongloop', visited: {}, associations: true}, function (err, models) {
|
||||
|
||||
for (var m in models) {
|
||||
models[m].all(show);
|
||||
}
|
||||
for (var m in models) {
|
||||
models[m].all(show);
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
|
|
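The demo above reads its connection settings through the rc module, so the dev.mysql block comes from an rc configuration file such as .loopbackrc in the project or home directory. A minimal sketch of such a file (all values are placeholders, not taken from this commit; the same file can carry the test.mysql block that test/init.js further down reads the same way):

{
  "dev": {
    "mysql": {
      "host": "localhost",
      "port": 3306,
      "database": "mydb",
      "username": "root",
      "password": "secret"
    }
  }
}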
lib/discovery.js (662 lines changed)
|
@@ -1,357 +1,357 @@
|
|||
module.exports = mixinDiscovery;
|
||||
|
||||
function mixinDiscovery(MySQL) {
|
||||
var async = require('async');
|
||||
var async = require('async');
|
||||
|
||||
function paginateSQL(sql, orderBy, options) {
|
||||
options = options || {};
|
||||
var limit = '';
|
||||
if (options.offset || options.skip || options.limit) {
|
||||
limit = ' LIMIT ' + (options.offset || options.skip || 0); // Offset starts from 0
|
||||
if (options.limit) {
|
||||
limit = limit + ',' + options.limit;
|
||||
}
|
||||
}
|
||||
if(!orderBy) {
|
||||
sql += ' ORDER BY ' + orderBy;
|
||||
}
|
||||
return sql + limit;
|
||||
function paginateSQL(sql, orderBy, options) {
|
||||
options = options || {};
|
||||
var limit = '';
|
||||
if (options.offset || options.skip || options.limit) {
|
||||
limit = ' LIMIT ' + (options.offset || options.skip || 0); // Offset starts from 0
|
||||
if (options.limit) {
|
||||
limit = limit + ',' + options.limit;
|
||||
}
|
||||
}
|
||||
|
||||
/*!
|
||||
* Build sql for listing tables
|
||||
* @param options {all: for all owners, owner: for a given owner}
|
||||
* @returns {string} The sql statement
|
||||
*/
|
||||
function queryTables(options) {
|
||||
var sqlTables = null;
|
||||
var owner = options.owner || options.schema;
|
||||
|
||||
if (options.all && !owner) {
|
||||
sqlTables = paginateSQL('SELECT \'table\' AS "type", table_name AS "name", table_schema AS "owner"'
|
||||
+ ' FROM information_schema.tables', 'table_schema, table_name', options);
|
||||
} else if (owner) {
|
||||
sqlTables = paginateSQL('SELECT \'table\' AS "type", table_name AS "name", table_schema AS "owner"'
|
||||
+ ' FROM information_schema.tables WHERE table_schema=\'' + owner + '\'', 'table_schema, table_name', options);
|
||||
} else {
|
||||
sqlTables = paginateSQL('SELECT \'table\' AS "type", table_name AS "name",'
|
||||
+ ' table_schema AS "owner" FROM information_schema.tables',
|
||||
'table_name', options);
|
||||
}
|
||||
return sqlTables;
|
||||
if (!orderBy) {
|
||||
sql += ' ORDER BY ' + orderBy;
|
||||
}
|
||||
return sql + limit;
|
||||
}
|
||||
|
||||
/*!
|
||||
* Build sql for listing views
|
||||
* @param options {all: for all owners, owner: for a given owner}
|
||||
* @returns {string} The sql statement
|
||||
*/
|
||||
function queryViews(options) {
|
||||
var sqlViews = null;
|
||||
if (options.views) {
|
||||
/*!
|
||||
* Build sql for listing tables
|
||||
* @param options {all: for all owners, owner: for a given owner}
|
||||
* @returns {string} The sql statement
|
||||
*/
|
||||
function queryTables(options) {
|
||||
var sqlTables = null;
|
||||
var owner = options.owner || options.schema;
|
||||
|
||||
var owner = options.owner || options.schema;
|
||||
|
||||
if (options.all && !owner) {
|
||||
sqlViews = paginateSQL('SELECT \'view\' AS "type", table_name AS "name",'
|
||||
+ ' table_schema AS "owner" FROM information_schema.views',
|
||||
'table_schema, table_name', options);
|
||||
} else if (owner) {
|
||||
sqlViews = paginateSQL('SELECT \'view\' AS "type", table_name AS "name",'
|
||||
+ ' table_schema AS "owner" FROM information_schema.views WHERE table_schema=\'' + owner + '\'',
|
||||
'table_schema, table_name', options);
|
||||
} else {
|
||||
sqlViews = paginateSQL('SELECT \'view\' AS "type", table_name AS "name",'
|
||||
+ ' table_schema AS "owner" FROM information_schema.views',
|
||||
'table_name', options);
|
||||
}
|
||||
}
|
||||
return sqlViews;
|
||||
if (options.all && !owner) {
|
||||
sqlTables = paginateSQL('SELECT \'table\' AS "type", table_name AS "name", table_schema AS "owner"'
|
||||
+ ' FROM information_schema.tables', 'table_schema, table_name', options);
|
||||
} else if (owner) {
|
||||
sqlTables = paginateSQL('SELECT \'table\' AS "type", table_name AS "name", table_schema AS "owner"'
|
||||
+ ' FROM information_schema.tables WHERE table_schema=\'' + owner + '\'', 'table_schema, table_name', options);
|
||||
} else {
|
||||
sqlTables = paginateSQL('SELECT \'table\' AS "type", table_name AS "name",'
|
||||
+ ' table_schema AS "owner" FROM information_schema.tables',
|
||||
'table_name', options);
|
||||
}
|
||||
return sqlTables;
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover model definitions
|
||||
*
|
||||
* @param {Object} options Options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*/
|
||||
MySQL.prototype.discoverModelDefinitions = function (options, cb) {
|
||||
if (!cb && typeof options === 'function') {
|
||||
cb = options;
|
||||
options = {};
|
||||
/*!
|
||||
* Build sql for listing views
|
||||
* @param options {all: for all owners, owner: for a given owner}
|
||||
* @returns {string} The sql statement
|
||||
*/
|
||||
function queryViews(options) {
|
||||
var sqlViews = null;
|
||||
if (options.views) {
|
||||
|
||||
var owner = options.owner || options.schema;
|
||||
|
||||
if (options.all && !owner) {
|
||||
sqlViews = paginateSQL('SELECT \'view\' AS "type", table_name AS "name",'
|
||||
+ ' table_schema AS "owner" FROM information_schema.views',
|
||||
'table_schema, table_name', options);
|
||||
} else if (owner) {
|
||||
sqlViews = paginateSQL('SELECT \'view\' AS "type", table_name AS "name",'
|
||||
+ ' table_schema AS "owner" FROM information_schema.views WHERE table_schema=\'' + owner + '\'',
|
||||
'table_schema, table_name', options);
|
||||
} else {
|
||||
sqlViews = paginateSQL('SELECT \'view\' AS "type", table_name AS "name",'
|
||||
+ ' table_schema AS "owner" FROM information_schema.views',
|
||||
'table_name', options);
|
||||
}
|
||||
}
|
||||
return sqlViews;
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover model definitions
|
||||
*
|
||||
* @param {Object} options Options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*/
|
||||
MySQL.prototype.discoverModelDefinitions = function (options, cb) {
|
||||
if (!cb && typeof options === 'function') {
|
||||
cb = options;
|
||||
options = {};
|
||||
}
|
||||
options = options || {};
|
||||
|
||||
var self = this;
|
||||
var calls = [function (callback) {
|
||||
self.query(queryTables(options), callback);
|
||||
}];
|
||||
|
||||
if (options.views) {
|
||||
calls.push(function (callback) {
|
||||
self.query(queryViews(options), callback);
|
||||
});
|
||||
}
|
||||
async.parallel(calls, function (err, data) {
|
||||
if (err) {
|
||||
cb(err, data);
|
||||
} else {
|
||||
var merged = [];
|
||||
merged = merged.concat(data.shift());
|
||||
if (data.length) {
|
||||
merged = merged.concat(data.shift());
|
||||
}
|
||||
options = options || {};
|
||||
cb(err, merged);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
var self = this;
|
||||
var calls = [function (callback) {
|
||||
self.query(queryTables(options), callback);
|
||||
}];
|
||||
/*!
|
||||
* Normalize the arguments
|
||||
* @param table string, required
|
||||
* @param options object, optional
|
||||
* @param cb function, optional
|
||||
*/
|
||||
function getArgs(table, options, cb) {
|
||||
if ('string' !== typeof table || !table) {
|
||||
throw new Error('table is a required string argument: ' + table);
|
||||
}
|
||||
options = options || {};
|
||||
if (!cb && 'function' === typeof options) {
|
||||
cb = options;
|
||||
options = {};
|
||||
}
|
||||
if (typeof options !== 'object') {
|
||||
throw new Error('options must be an object: ' + options);
|
||||
}
|
||||
return {
|
||||
owner: options.owner || options.schema,
|
||||
table: table,
|
||||
options: options,
|
||||
cb: cb
|
||||
};
|
||||
}
|
||||
|
||||
if (options.views) {
|
||||
calls.push(function (callback) {
|
||||
self.query(queryViews(options), callback);
|
||||
});
|
||||
}
|
||||
async.parallel(calls, function (err, data) {
|
||||
if (err) {
|
||||
cb(err, data);
|
||||
} else {
|
||||
var merged = [];
|
||||
merged = merged.concat(data.shift());
|
||||
if (data.length) {
|
||||
merged = merged.concat(data.shift());
|
||||
}
|
||||
cb(err, merged);
|
||||
}
|
||||
/*!
|
||||
* Build the sql statement to query columns for a given table
|
||||
* @param owner
|
||||
* @param table
|
||||
* @returns {String} The sql statement
|
||||
*/
|
||||
function queryColumns(owner, table) {
|
||||
var sql = null;
|
||||
if (owner) {
|
||||
sql = paginateSQL('SELECT table_schema AS "owner", table_name AS "tableName", column_name AS "columnName", data_type AS "dataType",'
|
||||
+ ' character_octet_length AS "dataLength", numeric_precision AS "dataPrecision", numeric_scale AS "dataScale", is_nullable AS "nullable"'
|
||||
+ ' FROM information_schema.columns'
|
||||
+ ' WHERE table_schema=\'' + owner + '\''
|
||||
+ (table ? ' AND table_name=\'' + table + '\'' : ''),
|
||||
'table_name, ordinal_position', {});
|
||||
} else {
|
||||
sql = paginateSQL('SELECT table_schema AS "owner", table_name AS "tableName", column_name AS "columnName", data_type AS "dataType",'
|
||||
+ ' character_octet_length AS "dataLength", numeric_precision AS "dataPrecision", numeric_scale AS "dataScale", is_nullable AS "nullable"'
|
||||
+ ' FROM information_schema.columns'
|
||||
+ (table ? ' WHERE table_name=\'' + table + '\'' : ''),
|
||||
'table_name, ordinal_position', {});
|
||||
}
|
||||
return sql;
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover model properties from a table
|
||||
* @param {String} table The table name
|
||||
* @param {Object} options The options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*
|
||||
*/
|
||||
MySQL.prototype.discoverModelProperties = function (table, options, cb) {
|
||||
var args = getArgs(table, options, cb);
|
||||
var owner = args.owner;
|
||||
table = args.table;
|
||||
options = args.options;
|
||||
cb = args.cb;
|
||||
|
||||
var sql = queryColumns(owner, table);
|
||||
var callback = function (err, results) {
|
||||
if (err) {
|
||||
cb(err, results);
|
||||
} else {
|
||||
results.map(function (r) {
|
||||
r.type = mysqlDataTypeToJSONType(r.dataType, r.dataLength);
|
||||
});
|
||||
cb(err, results);
|
||||
}
|
||||
};
|
||||
this.query(sql, callback);
|
||||
};
|
||||
|
||||
/*!
|
||||
* Normalize the arguments
|
||||
* @param table string, required
|
||||
* @param options object, optional
|
||||
* @param cb function, optional
|
||||
*/
|
||||
function getArgs(table, options, cb) {
|
||||
if ('string' !== typeof table || !table) {
|
||||
throw new Error('table is a required string argument: ' + table);
|
||||
}
|
||||
options = options || {};
|
||||
if (!cb && 'function' === typeof options) {
|
||||
cb = options;
|
||||
options = {};
|
||||
}
|
||||
if (typeof options !== 'object') {
|
||||
throw new Error('options must be an object: ' + options);
|
||||
}
|
||||
return {
|
||||
owner: options.owner || options.schema,
|
||||
table: table,
|
||||
options: options,
|
||||
cb: cb
|
||||
};
|
||||
}
|
||||
|
||||
/*!
|
||||
* Build the sql statement to query columns for a given table
|
||||
* @param owner
|
||||
* @param table
|
||||
* @returns {String} The sql statement
|
||||
*/
|
||||
function queryColumns(owner, table) {
|
||||
var sql = null;
|
||||
if (owner) {
|
||||
sql = paginateSQL('SELECT table_schema AS "owner", table_name AS "tableName", column_name AS "columnName", data_type AS "dataType",'
|
||||
+ ' character_octet_length AS "dataLength", numeric_precision AS "dataPrecision", numeric_scale AS "dataScale", is_nullable AS "nullable"'
|
||||
+ ' FROM information_schema.columns'
|
||||
+ ' WHERE table_schema=\'' + owner + '\''
|
||||
+ (table ? ' AND table_name=\'' + table + '\'' : ''),
|
||||
'table_name, ordinal_position', {});
|
||||
} else {
|
||||
sql = paginateSQL('SELECT table_schema AS "owner", table_name AS "tableName", column_name AS "columnName", data_type AS "dataType",'
|
||||
+ ' character_octet_length AS "dataLength", numeric_precision AS "dataPrecision", numeric_scale AS "dataScale", is_nullable AS "nullable"'
|
||||
+ ' FROM information_schema.columns'
|
||||
+ (table ? ' WHERE table_name=\'' + table + '\'' : ''),
|
||||
'table_name, ordinal_position', {});
|
||||
}
|
||||
return sql;
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover model properties from a table
|
||||
* @param {String} table The table name
|
||||
* @param {Object} options The options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*
|
||||
*/
|
||||
MySQL.prototype.discoverModelProperties = function (table, options, cb) {
|
||||
var args = getArgs(table, options, cb);
|
||||
var owner = args.owner;
|
||||
table = args.table;
|
||||
options = args.options;
|
||||
cb = args.cb;
|
||||
|
||||
var sql = queryColumns(owner, table);
|
||||
var callback = function (err, results) {
|
||||
if (err) {
|
||||
cb(err, results);
|
||||
} else {
|
||||
results.map(function (r) {
|
||||
r.type = mysqlDataTypeToJSONType(r.dataType, r.dataLength);
|
||||
});
|
||||
cb(err, results);
|
||||
}
|
||||
};
|
||||
this.query(sql, callback);
|
||||
};
|
||||
|
||||
/*!
|
||||
* Build the sql statement for querying primary keys of a given table
|
||||
* @param owner
|
||||
* @param table
|
||||
* @returns {string}
|
||||
*/
|
||||
/*!
|
||||
* Build the sql statement for querying primary keys of a given table
|
||||
* @param owner
|
||||
* @param table
|
||||
* @returns {string}
|
||||
*/
|
||||
// http://docs.oracle.com/javase/6/docs/api/java/sql/DatabaseMetaData.html#getPrimaryKeys(java.lang.String, java.lang.String, java.lang.String)
|
||||
function queryForPrimaryKeys(owner, table) {
|
||||
var sql = 'SELECT table_schema AS "owner", '
|
||||
+ 'table_name AS "tableName", column_name AS "columnName", ordinal_position AS "keySeq", constraint_name AS "pkName" FROM'
|
||||
+ ' information_schema.key_column_usage'
|
||||
+ ' WHERE constraint_name=\'PRIMARY\'';
|
||||
function queryForPrimaryKeys(owner, table) {
|
||||
var sql = 'SELECT table_schema AS "owner", '
|
||||
+ 'table_name AS "tableName", column_name AS "columnName", ordinal_position AS "keySeq", constraint_name AS "pkName" FROM'
|
||||
+ ' information_schema.key_column_usage'
|
||||
+ ' WHERE constraint_name=\'PRIMARY\'';
|
||||
|
||||
if (owner) {
|
||||
sql += ' AND table_schema=\'' + owner + '\'';
|
||||
}
|
||||
if (table) {
|
||||
sql += ' AND table_name=\'' + table + '\'';
|
||||
}
|
||||
sql += ' ORDER BY table_schema, constraint_name, table_name, ordinal_position';
|
||||
return sql;
|
||||
if (owner) {
|
||||
sql += ' AND table_schema=\'' + owner + '\'';
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover primary keys for a given table
|
||||
* @param {String} table The table name
|
||||
* @param {Object} options The options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*/
|
||||
MySQL.prototype.discoverPrimaryKeys = function (table, options, cb) {
|
||||
var args = getArgs(table, options, cb);
|
||||
var owner = args.owner;
|
||||
table = args.table;
|
||||
options = args.options;
|
||||
cb = args.cb;
|
||||
|
||||
var sql = queryForPrimaryKeys(owner, table);
|
||||
this.query(sql, cb);
|
||||
};
|
||||
|
||||
/*!
|
||||
* Build the sql statement for querying foreign keys of a given table
|
||||
* @param owner
|
||||
* @param table
|
||||
* @returns {string}
|
||||
*/
|
||||
function queryForeignKeys(owner, table) {
|
||||
var sql =
|
||||
'SELECT table_schema AS "fkOwner", constraint_name AS "fkName", table_name AS "fkTableName",'
|
||||
+ ' column_name AS "fkColumnName", ordinal_position AS "keySeq",'
|
||||
+ ' referenced_table_schema AS "pkOwner", \'PRIMARY\' AS "pkName", '
|
||||
+ ' referenced_table_name AS "pkTableName", referenced_column_name AS "pkColumnName"'
|
||||
+ ' FROM information_schema.key_column_usage'
|
||||
+ ' WHERE'
|
||||
+ ' constraint_name!=\'PRIMARY\' and POSITION_IN_UNIQUE_CONSTRAINT IS NOT NULL';
|
||||
if (owner) {
|
||||
sql += ' AND table_schema=\'' + owner + '\'';
|
||||
}
|
||||
if (table) {
|
||||
sql += ' AND table_name=\'' + table + '\'';
|
||||
}
|
||||
return sql;
|
||||
if (table) {
|
||||
sql += ' AND table_name=\'' + table + '\'';
|
||||
}
|
||||
sql += ' ORDER BY table_schema, constraint_name, table_name, ordinal_position';
|
||||
return sql;
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover foreign keys for a given table
|
||||
* @param {String} table The table name
|
||||
* @param {Object} options The options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*/
|
||||
MySQL.prototype.discoverForeignKeys = function (table, options, cb) {
|
||||
var args = getArgs(table, options, cb);
|
||||
var owner = args.owner;
|
||||
table = args.table;
|
||||
options = args.options;
|
||||
cb = args.cb;
|
||||
/**
|
||||
* Discover primary keys for a given table
|
||||
* @param {String} table The table name
|
||||
* @param {Object} options The options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*/
|
||||
MySQL.prototype.discoverPrimaryKeys = function (table, options, cb) {
|
||||
var args = getArgs(table, options, cb);
|
||||
var owner = args.owner;
|
||||
table = args.table;
|
||||
options = args.options;
|
||||
cb = args.cb;
|
||||
|
||||
var sql = queryForeignKeys(owner, table);
|
||||
this.query(sql, cb);
|
||||
};
|
||||
var sql = queryForPrimaryKeys(owner, table);
|
||||
this.query(sql, cb);
|
||||
};
|
||||
|
||||
/*!
|
||||
* Retrieves a description of the foreign key columns that reference the given table's primary key columns (the foreign keys exported by a table).
|
||||
* They are ordered by fkTableOwner, fkTableName, and keySeq.
|
||||
* @param owner
|
||||
* @param table
|
||||
* @returns {string}
|
||||
*/
|
||||
function queryExportedForeignKeys(owner, table) {
|
||||
var sql = 'SELECT a.constraint_name AS "fkName", a.table_schema AS "fkOwner", a.table_name AS "fkTableName",'
|
||||
+ ' a.column_name AS "fkColumnName", a.ordinal_position AS "keySeq",'
|
||||
+ ' NULL AS "pkName", a.referenced_table_schema AS "pkOwner",'
|
||||
+ ' a.referenced_table_name AS "pkTableName", a.referenced_column_name AS "pkColumnName"'
|
||||
+ ' FROM'
|
||||
+ ' information_schema.key_column_usage a'
|
||||
+ ' WHERE a.position_in_unique_constraint IS NOT NULL';
|
||||
if (owner) {
|
||||
sql += ' and a.referenced_table_schema=\'' + owner + '\'';
|
||||
}
|
||||
if (table) {
|
||||
sql += ' and a.referenced_table_name=\'' + table + '\'';
|
||||
}
|
||||
sql += ' order by a.table_schema, a.table_name, a.ordinal_position';
|
||||
|
||||
return sql;
|
||||
/*!
|
||||
* Build the sql statement for querying foreign keys of a given table
|
||||
* @param owner
|
||||
* @param table
|
||||
* @returns {string}
|
||||
*/
|
||||
function queryForeignKeys(owner, table) {
|
||||
var sql =
|
||||
'SELECT table_schema AS "fkOwner", constraint_name AS "fkName", table_name AS "fkTableName",'
|
||||
+ ' column_name AS "fkColumnName", ordinal_position AS "keySeq",'
|
||||
+ ' referenced_table_schema AS "pkOwner", \'PRIMARY\' AS "pkName", '
|
||||
+ ' referenced_table_name AS "pkTableName", referenced_column_name AS "pkColumnName"'
|
||||
+ ' FROM information_schema.key_column_usage'
|
||||
+ ' WHERE'
|
||||
+ ' constraint_name!=\'PRIMARY\' and POSITION_IN_UNIQUE_CONSTRAINT IS NOT NULL';
|
||||
if (owner) {
|
||||
sql += ' AND table_schema=\'' + owner + '\'';
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover foreign keys that reference the primary key of this table
|
||||
* @param {String} table The table name
|
||||
* @param {Object} options The options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*/
|
||||
MySQL.prototype.discoverExportedForeignKeys = function (table, options, cb) {
|
||||
var args = getArgs(table, options, cb);
|
||||
var owner = args.owner;
|
||||
table = args.table;
|
||||
options = args.options;
|
||||
cb = args.cb;
|
||||
|
||||
var sql = queryExportedForeignKeys(owner, table);
|
||||
this.query(sql, cb);
|
||||
};
|
||||
|
||||
function mysqlDataTypeToJSONType(mysqlType, dataLength) {
|
||||
var type = mysqlType.toUpperCase();
|
||||
switch (type) {
|
||||
case 'CHAR':
|
||||
if (dataLength === 1) {
|
||||
// Treat char(1) as boolean
|
||||
return 'Boolean';
|
||||
} else {
|
||||
return 'String';
|
||||
}
|
||||
|
||||
case 'VARCHAR':
|
||||
case 'TINYTEXT':
|
||||
case 'MEDIUMTEXT':
|
||||
case 'LONGTEXT':
|
||||
case 'TEXT':
|
||||
case 'ENUM':
|
||||
case 'SET':
|
||||
return 'String';
|
||||
case 'TINYBLOB':
|
||||
case 'MEDIUMBLOB':
|
||||
case 'LONGBLOB':
|
||||
case 'BLOB':
|
||||
case 'BINARY':
|
||||
case 'VARBINARY':
|
||||
case 'BIT':
|
||||
return 'Binary';
|
||||
case 'TINYINT':
|
||||
case 'SMALLINT':
|
||||
case 'INT':
|
||||
case 'MEDIUMINT':
|
||||
case 'YEAR':
|
||||
case 'FLOAT':
|
||||
case 'DOUBLE':
|
||||
return 'Number';
|
||||
case 'DATE':
|
||||
case 'TIMESTAMP':
|
||||
case 'DATETIME':
|
||||
return 'Date';
|
||||
case 'POINT':
|
||||
return 'GeoPoint';
|
||||
default:
|
||||
return 'String';
|
||||
}
|
||||
if (table) {
|
||||
sql += ' AND table_name=\'' + table + '\'';
|
||||
}
|
||||
return sql;
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover foreign keys for a given table
|
||||
* @param {String} table The table name
|
||||
* @param {Object} options The options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*/
|
||||
MySQL.prototype.discoverForeignKeys = function (table, options, cb) {
|
||||
var args = getArgs(table, options, cb);
|
||||
var owner = args.owner;
|
||||
table = args.table;
|
||||
options = args.options;
|
||||
cb = args.cb;
|
||||
|
||||
var sql = queryForeignKeys(owner, table);
|
||||
this.query(sql, cb);
|
||||
};
|
||||
|
||||
/*!
|
||||
* Retrieves a description of the foreign key columns that reference the given table's primary key columns (the foreign keys exported by a table).
|
||||
* They are ordered by fkTableOwner, fkTableName, and keySeq.
|
||||
* @param owner
|
||||
* @param table
|
||||
* @returns {string}
|
||||
*/
|
||||
function queryExportedForeignKeys(owner, table) {
|
||||
var sql = 'SELECT a.constraint_name AS "fkName", a.table_schema AS "fkOwner", a.table_name AS "fkTableName",'
|
||||
+ ' a.column_name AS "fkColumnName", a.ordinal_position AS "keySeq",'
|
||||
+ ' NULL AS "pkName", a.referenced_table_schema AS "pkOwner",'
|
||||
+ ' a.referenced_table_name AS "pkTableName", a.referenced_column_name AS "pkColumnName"'
|
||||
+ ' FROM'
|
||||
+ ' information_schema.key_column_usage a'
|
||||
+ ' WHERE a.position_in_unique_constraint IS NOT NULL';
|
||||
if (owner) {
|
||||
sql += ' and a.referenced_table_schema=\'' + owner + '\'';
|
||||
}
|
||||
if (table) {
|
||||
sql += ' and a.referenced_table_name=\'' + table + '\'';
|
||||
}
|
||||
sql += ' order by a.table_schema, a.table_name, a.ordinal_position';
|
||||
|
||||
return sql;
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover foreign keys that reference the primary key of this table
|
||||
* @param {String} table The table name
|
||||
* @param {Object} options The options for discovery
|
||||
* @param {Function} [cb] The callback function
|
||||
*/
|
||||
MySQL.prototype.discoverExportedForeignKeys = function (table, options, cb) {
|
||||
var args = getArgs(table, options, cb);
|
||||
var owner = args.owner;
|
||||
table = args.table;
|
||||
options = args.options;
|
||||
cb = args.cb;
|
||||
|
||||
var sql = queryExportedForeignKeys(owner, table);
|
||||
this.query(sql, cb);
|
||||
};
|
||||
|
||||
function mysqlDataTypeToJSONType(mysqlType, dataLength) {
|
||||
var type = mysqlType.toUpperCase();
|
||||
switch (type) {
|
||||
case 'CHAR':
|
||||
if (dataLength === 1) {
|
||||
// Treat char(1) as boolean
|
||||
return 'Boolean';
|
||||
} else {
|
||||
return 'String';
|
||||
}
|
||||
|
||||
case 'VARCHAR':
|
||||
case 'TINYTEXT':
|
||||
case 'MEDIUMTEXT':
|
||||
case 'LONGTEXT':
|
||||
case 'TEXT':
|
||||
case 'ENUM':
|
||||
case 'SET':
|
||||
return 'String';
|
||||
case 'TINYBLOB':
|
||||
case 'MEDIUMBLOB':
|
||||
case 'LONGBLOB':
|
||||
case 'BLOB':
|
||||
case 'BINARY':
|
||||
case 'VARBINARY':
|
||||
case 'BIT':
|
||||
return 'Binary';
|
||||
case 'TINYINT':
|
||||
case 'SMALLINT':
|
||||
case 'INT':
|
||||
case 'MEDIUMINT':
|
||||
case 'YEAR':
|
||||
case 'FLOAT':
|
||||
case 'DOUBLE':
|
||||
return 'Number';
|
||||
case 'DATE':
|
||||
case 'TIMESTAMP':
|
||||
case 'DATETIME':
|
||||
return 'Date';
|
||||
case 'POINT':
|
||||
return 'GeoPoint';
|
||||
default:
|
||||
return 'String';
|
||||
}
|
||||
}
|
||||
}
|
||||
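As a usage sketch (assuming a DataSource ds built from this connector, as in example/discovery.js near the top of this commit), discoverModelProperties() returns one row per column, with a LoopBack type derived by mysqlDataTypeToJSONType():

ds.discoverModelProperties('customer', function (err, props) {
  if (err) {
    return console.error(err);
  }
  props.forEach(function (p) {
    // p.type is mapped from p.dataType, e.g. varchar -> 'String',
    // datetime -> 'Date', int -> 'Number', char(1) -> 'Boolean'
    console.log(p.columnName, p.dataType, '->', p.type);
  });
});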
|
|
|
@@ -1,42 +1,42 @@
|
|||
var EnumFactory = function() {
|
||||
if(arguments.length > 0){
|
||||
var Enum = function Enum(arg){
|
||||
if(typeof arg === 'number' && arg % 1 == 0) {
|
||||
return Enum._values[arg];
|
||||
} else if(Enum[arg]){
|
||||
return Enum[arg]
|
||||
} else if (Enum._values.indexOf(arg) !== -1 ) {
|
||||
return arg;
|
||||
} else if (arg === null) {
|
||||
return null;
|
||||
} else {
|
||||
return '';
|
||||
}
|
||||
};
|
||||
var dxList = [];
|
||||
dxList.push(''); // Want empty value to be at index 0 to match MySQL Enum values and MySQL non-strict behavior.
|
||||
for(var arg in arguments){
|
||||
arg = String(arguments[arg]);
|
||||
Object.defineProperty(Enum, arg.toUpperCase(), {configurable: false, enumerable: true, value: arg, writable: false});
|
||||
dxList.push(arg);
|
||||
}
|
||||
Object.defineProperty(Enum, '_values', {configurable: false, enumerable: false, value: dxList, writable: false});
|
||||
Object.defineProperty(Enum, '_string', {configurable: false, enumerable: false, value: stringified(Enum), writable: false});
|
||||
Object.freeze(Enum);
|
||||
return Enum;
|
||||
} else {
|
||||
throw "No arguments - could not create Enum.";
|
||||
var EnumFactory = function () {
|
||||
if (arguments.length > 0) {
|
||||
var Enum = function Enum(arg) {
|
||||
if (typeof arg === 'number' && arg % 1 == 0) {
|
||||
return Enum._values[arg];
|
||||
} else if (Enum[arg]) {
|
||||
return Enum[arg]
|
||||
} else if (Enum._values.indexOf(arg) !== -1) {
|
||||
return arg;
|
||||
} else if (arg === null) {
|
||||
return null;
|
||||
} else {
|
||||
return '';
|
||||
}
|
||||
};
|
||||
var dxList = [];
|
||||
dxList.push(''); // Want empty value to be at index 0 to match MySQL Enum values and MySQL non-strict behavior.
|
||||
for (var arg in arguments) {
|
||||
arg = String(arguments[arg]);
|
||||
Object.defineProperty(Enum, arg.toUpperCase(), {configurable: false, enumerable: true, value: arg, writable: false});
|
||||
dxList.push(arg);
|
||||
}
|
||||
Object.defineProperty(Enum, '_values', {configurable: false, enumerable: false, value: dxList, writable: false});
|
||||
Object.defineProperty(Enum, '_string', {configurable: false, enumerable: false, value: stringified(Enum), writable: false});
|
||||
Object.freeze(Enum);
|
||||
return Enum;
|
||||
} else {
|
||||
throw "No arguments - could not create Enum.";
|
||||
}
|
||||
};
|
||||
|
||||
function stringified(anEnum) {
|
||||
var s = [];
|
||||
for(var i in anEnum._values){
|
||||
if(anEnum._values[i] != ''){
|
||||
s.push("'" + anEnum._values[i] + "'");
|
||||
}
|
||||
var s = [];
|
||||
for (var i in anEnum._values) {
|
||||
if (anEnum._values[i] != '') {
|
||||
s.push("'" + anEnum._values[i] + "'");
|
||||
}
|
||||
return s.join(',');
|
||||
}
|
||||
return s.join(',');
|
||||
}
|
||||
|
||||
exports.EnumFactory = EnumFactory;
|
||||
|
|
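A short usage sketch for the factory above (it mirrors the assertions in test/datatypes.test.js below, where the connector exposes the factory as db.EnumFactory; the require path here is assumed):

var EnumFactory = require('./lib/enumFactory').EnumFactory; // path assumed
var ANIMAL_ENUM = EnumFactory('dog', 'cat', 'mouse');

ANIMAL_ENUM.CAT;      // 'cat' - each value is exposed as an upper-case constant
ANIMAL_ENUM('cat');   // 'cat' - known strings parse to themselves
ANIMAL_ENUM(2);       // 'cat' - numeric lookup; index 0 is reserved for ''
ANIMAL_ENUM(0);       // ''    - the empty value, matching MySQL's non-strict behavior
ANIMAL_ENUM('horse'); // ''    - unknown values fall back to the empty string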
lib/mysql.js (1659 lines changed; diff suppressed because it is too large)
|
@@ -3,105 +3,102 @@ var assert = require('assert');
|
|||
|
||||
var db, DummyModel, odb;
|
||||
|
||||
describe('migrations', function() {
|
||||
describe('migrations', function () {
|
||||
|
||||
before(function() {
|
||||
require('./init.js');
|
||||
before(function () {
|
||||
require('./init.js');
|
||||
|
||||
odb = getDataSource({collation: 'utf8_general_ci', createDatabase: true});
|
||||
db = odb;
|
||||
odb = getDataSource({collation: 'utf8_general_ci', createDatabase: true});
|
||||
db = odb;
|
||||
});
|
||||
|
||||
it('should use utf8 charset', function (done) {
|
||||
|
||||
var test_set = /utf8/;
|
||||
var test_collo = /utf8_general_ci/;
|
||||
var test_set_str = 'utf8';
|
||||
var test_set_collo = 'utf8_general_ci';
|
||||
charsetTest(test_set, test_collo, test_set_str, test_set_collo, done);
|
||||
|
||||
});
|
||||
|
||||
it('should disconnect first db', function (done) {
|
||||
db.client.end(function () {
|
||||
odb = getSchema();
|
||||
done()
|
||||
});
|
||||
});
|
||||
|
||||
it('should use latin1 charset', function (done) {
|
||||
|
||||
it('should use utf8 charset', function(done) {
|
||||
var test_set = /latin1/;
|
||||
var test_collo = /latin1_general_ci/;
|
||||
var test_set_str = 'latin1';
|
||||
var test_set_collo = 'latin1_general_ci';
|
||||
charsetTest(test_set, test_collo, test_set_str, test_set_collo, done);
|
||||
|
||||
var test_set = /utf8/;
|
||||
var test_collo = /utf8_general_ci/;
|
||||
var test_set_str = 'utf8';
|
||||
var test_set_collo = 'utf8_general_ci';
|
||||
charsetTest(test_set, test_collo, test_set_str, test_set_collo, done);
|
||||
});
|
||||
|
||||
it('should drop db and disconnect all', function (done) {
|
||||
db.connector.query('DROP DATABASE IF EXISTS ' + db.settings.database, function (err) {
|
||||
db.client.end(function () {
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should disconnect first db', function(done) {
|
||||
db.client.end(function(){
|
||||
odb = getSchema();
|
||||
done()
|
||||
});
|
||||
});
|
||||
|
||||
it('should use latin1 charset', function(done) {
|
||||
|
||||
var test_set = /latin1/;
|
||||
var test_collo = /latin1_general_ci/;
|
||||
var test_set_str = 'latin1';
|
||||
var test_set_collo = 'latin1_general_ci';
|
||||
charsetTest(test_set, test_collo, test_set_str, test_set_collo, done);
|
||||
|
||||
});
|
||||
|
||||
it('should drop db and disconnect all', function(done) {
|
||||
db.connector.query('DROP DATABASE IF EXISTS ' + db.settings.database, function(err) {
|
||||
db.client.end(function(){
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function charsetTest(test_set, test_collo, test_set_str, test_set_collo, done) {
|
||||
|
||||
query('DROP DATABASE IF EXISTS ' + odb.settings.database, function (err) {
|
||||
assert.ok(!err);
|
||||
odb.client.end(function () {
|
||||
|
||||
function charsetTest(test_set, test_collo, test_set_str, test_set_collo, done){
|
||||
|
||||
query('DROP DATABASE IF EXISTS ' + odb.settings.database, function(err) {
|
||||
assert.ok(!err);
|
||||
odb.client.end(function(){
|
||||
|
||||
db = getSchema({collation: test_set_collo, createDatabase: true});
|
||||
DummyModel = db.define('DummyModel', {string: String});
|
||||
db.automigrate(function(){
|
||||
var q = 'SELECT DEFAULT_COLLATION_NAME FROM information_schema.SCHEMATA WHERE SCHEMA_NAME = ' + db.client.escape(db.settings.database) + ' LIMIT 1';
|
||||
db.connector.query(q, function(err, r) {
|
||||
assert.ok(!err);
|
||||
assert.ok(r[0].DEFAULT_COLLATION_NAME.match(test_collo));
|
||||
db.connector.query('SHOW VARIABLES LIKE "character_set%"', function(err, r){
|
||||
assert.ok(!err);
|
||||
var hit_all = 0;
|
||||
for (var result in r) {
|
||||
hit_all += matchResult(r[result], 'character_set_connection', test_set);
|
||||
hit_all += matchResult(r[result], 'character_set_database', test_set);
|
||||
hit_all += matchResult(r[result], 'character_set_results', test_set);
|
||||
hit_all += matchResult(r[result], 'character_set_client', test_set);
|
||||
}
|
||||
assert.equal(hit_all, 4);
|
||||
});
|
||||
db.connector.query('SHOW VARIABLES LIKE "collation%"', function(err, r){
|
||||
assert.ok(!err);
|
||||
var hit_all = 0;
|
||||
for (var result in r) {
|
||||
hit_all += matchResult(r[result], 'collation_connection', test_set);
|
||||
hit_all += matchResult(r[result], 'collation_database', test_set);
|
||||
}
|
||||
assert.equal(hit_all, 2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
db = getSchema({collation: test_set_collo, createDatabase: true});
|
||||
DummyModel = db.define('DummyModel', {string: String});
|
||||
db.automigrate(function () {
|
||||
var q = 'SELECT DEFAULT_COLLATION_NAME FROM information_schema.SCHEMATA WHERE SCHEMA_NAME = ' + db.client.escape(db.settings.database) + ' LIMIT 1';
|
||||
db.connector.query(q, function (err, r) {
|
||||
assert.ok(!err);
|
||||
assert.ok(r[0].DEFAULT_COLLATION_NAME.match(test_collo));
|
||||
db.connector.query('SHOW VARIABLES LIKE "character_set%"', function (err, r) {
|
||||
assert.ok(!err);
|
||||
var hit_all = 0;
|
||||
for (var result in r) {
|
||||
hit_all += matchResult(r[result], 'character_set_connection', test_set);
|
||||
hit_all += matchResult(r[result], 'character_set_database', test_set);
|
||||
hit_all += matchResult(r[result], 'character_set_results', test_set);
|
||||
hit_all += matchResult(r[result], 'character_set_client', test_set);
|
||||
}
|
||||
assert.equal(hit_all, 4);
|
||||
});
|
||||
db.connector.query('SHOW VARIABLES LIKE "collation%"', function (err, r) {
|
||||
assert.ok(!err);
|
||||
var hit_all = 0;
|
||||
for (var result in r) {
|
||||
hit_all += matchResult(r[result], 'collation_connection', test_set);
|
||||
hit_all += matchResult(r[result], 'collation_database', test_set);
|
||||
}
|
||||
assert.equal(hit_all, 2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
function matchResult(result, variable_name, match) {
|
||||
if(result.Variable_name == variable_name){
|
||||
assert.ok(result.Value.match(match));
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
if (result.Variable_name == variable_name) {
|
||||
assert.ok(result.Value.match(match));
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
var query = function (sql, cb) {
|
||||
odb.connector.query(sql, cb);
|
||||
odb.connector.query(sql, cb);
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@@ -3,129 +3,129 @@ var assert = require('assert');
|
|||
|
||||
var db, EnumModel, ANIMAL_ENUM;
|
||||
|
||||
describe('MySQL specific datatypes', function() {
|
||||
describe('MySQL specific datatypes', function () {
|
||||
|
||||
before(setup);
|
||||
before(setup);
|
||||
|
||||
it('should run migration', function(done) {
|
||||
db.automigrate(function(){
|
||||
done();
|
||||
});
|
||||
it('should run migration', function (done) {
|
||||
db.automigrate(function () {
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('An enum should parse itself', function(done) {
|
||||
assert.equal(ANIMAL_ENUM.CAT, ANIMAL_ENUM('cat'));
|
||||
assert.equal(ANIMAL_ENUM.CAT, ANIMAL_ENUM('CAT'));
|
||||
assert.equal(ANIMAL_ENUM.CAT, ANIMAL_ENUM(2));
|
||||
assert.equal(ANIMAL_ENUM.CAT, 'cat');
|
||||
assert.equal(ANIMAL_ENUM(null), null);
|
||||
assert.equal(ANIMAL_ENUM(''), '');
|
||||
assert.equal(ANIMAL_ENUM(0), '');
|
||||
it('An enum should parse itself', function (done) {
|
||||
assert.equal(ANIMAL_ENUM.CAT, ANIMAL_ENUM('cat'));
|
||||
assert.equal(ANIMAL_ENUM.CAT, ANIMAL_ENUM('CAT'));
|
||||
assert.equal(ANIMAL_ENUM.CAT, ANIMAL_ENUM(2));
|
||||
assert.equal(ANIMAL_ENUM.CAT, 'cat');
|
||||
assert.equal(ANIMAL_ENUM(null), null);
|
||||
assert.equal(ANIMAL_ENUM(''), '');
|
||||
assert.equal(ANIMAL_ENUM(0), '');
|
||||
done();
|
||||
});
|
||||
|
||||
it('should create a model instance with Enums', function (done) {
|
||||
var em = EnumModel.create({animal: ANIMAL_ENUM.CAT, condition: 'sleepy', mood: 'happy'}, function (err, obj) {
|
||||
assert.ok(!err);
|
||||
assert.equal(obj.condition, 'sleepy');
|
||||
EnumModel.findOne({where: {animal: ANIMAL_ENUM.CAT}}, function (err, found) {
|
||||
assert.ok(!err);
|
||||
assert.equal(found.mood, 'happy');
|
||||
assert.equal(found.animal, ANIMAL_ENUM.CAT);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a model instance with Enums', function(done) {
|
||||
var em = EnumModel.create({animal: ANIMAL_ENUM.CAT, condition: 'sleepy', mood: 'happy'}, function(err, obj) {
|
||||
assert.ok(!err);
|
||||
assert.equal(obj.condition, 'sleepy');
|
||||
EnumModel.findOne({where: {animal: ANIMAL_ENUM.CAT}}, function(err, found){
|
||||
assert.ok(!err);
|
||||
assert.equal(found.mood, 'happy');
|
||||
assert.equal(found.animal, ANIMAL_ENUM.CAT);
|
||||
done();
|
||||
});
|
||||
});
|
||||
it('should fail spectacularly with invalid enum values', function (done) {
|
||||
var em = EnumModel.create({animal: 'horse', condition: 'sleepy', mood: 'happy'}, function (err, obj) {
|
||||
assert.ok(!err);
|
||||
EnumModel.findById(obj.id, function (err, found) {
|
||||
assert.ok(!err);
|
||||
assert.equal(found.animal, ''); // MySQL fun.
|
||||
assert.equal(found.animal, 0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should fail spectacularly with invalid enum values', function(done) {
|
||||
var em = EnumModel.create({animal: 'horse', condition: 'sleepy', mood: 'happy'}, function(err, obj) {
|
||||
assert.ok(!err);
|
||||
EnumModel.findById(obj.id, function(err, found){
|
||||
assert.ok(!err);
|
||||
assert.equal(found.animal, ''); // MySQL fun.
|
||||
assert.equal(found.animal, 0);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should disconnect when done', function(done) {
|
||||
db.disconnect();
|
||||
done()
|
||||
});
|
||||
it('should disconnect when done', function (done) {
|
||||
db.disconnect();
|
||||
done()
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
function setup(done) {
|
||||
|
||||
require('./init.js');
|
||||
require('./init.js');
|
||||
|
||||
db = getSchema();
|
||||
db = getSchema();
|
||||
|
||||
ANIMAL_ENUM = db.EnumFactory('dog', 'cat', 'mouse');
|
||||
ANIMAL_ENUM = db.EnumFactory('dog', 'cat', 'mouse');
|
||||
|
||||
EnumModel = db.define('EnumModel', {
|
||||
animal: { type: ANIMAL_ENUM, null: false },
|
||||
condition: { type: db.EnumFactory('hungry', 'sleepy', 'thirsty') },
|
||||
mood: { type: db.EnumFactory('angry', 'happy', 'sad') }
|
||||
});
|
||||
EnumModel = db.define('EnumModel', {
|
||||
animal: { type: ANIMAL_ENUM, null: false },
|
||||
condition: { type: db.EnumFactory('hungry', 'sleepy', 'thirsty') },
|
||||
mood: { type: db.EnumFactory('angry', 'happy', 'sad') }
|
||||
});
|
||||
|
||||
blankDatabase(db, done);
|
||||
blankDatabase(db, done);
|
||||
|
||||
}
|
||||
|
||||
var query = function (sql, cb) {
|
||||
db.adapter.query(sql, cb);
|
||||
db.adapter.query(sql, cb);
|
||||
};
|
||||
|
||||
var blankDatabase = function (db, cb) {
|
||||
var dbn = db.settings.database;
|
||||
var cs = db.settings.charset;
|
||||
var co = db.settings.collation;
|
||||
query('DROP DATABASE IF EXISTS ' + dbn, function(err) {
|
||||
var q = 'CREATE DATABASE ' + dbn;
|
||||
if(cs){
|
||||
q += ' CHARACTER SET ' + cs;
|
||||
}
|
||||
if(co){
|
||||
q += ' COLLATE ' + co;
|
||||
}
|
||||
query(q, function(err) {
|
||||
query('USE '+ dbn, cb);
|
||||
});
|
||||
var dbn = db.settings.database;
|
||||
var cs = db.settings.charset;
|
||||
var co = db.settings.collation;
|
||||
query('DROP DATABASE IF EXISTS ' + dbn, function (err) {
|
||||
var q = 'CREATE DATABASE ' + dbn;
|
||||
if (cs) {
|
||||
q += ' CHARACTER SET ' + cs;
|
||||
}
|
||||
if (co) {
|
||||
q += ' COLLATE ' + co;
|
||||
}
|
||||
query(q, function (err) {
|
||||
query('USE ' + dbn, cb);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
getFields = function (model, cb) {
|
||||
query('SHOW FIELDS FROM ' + model, function(err, res) {
|
||||
if (err) {
|
||||
cb(err);
|
||||
} else {
|
||||
var fields = {};
|
||||
res.forEach(function(field){
|
||||
fields[field.Field] = field;
|
||||
});
|
||||
cb(err, fields);
|
||||
}
|
||||
});
|
||||
query('SHOW FIELDS FROM ' + model, function (err, res) {
|
||||
if (err) {
|
||||
cb(err);
|
||||
} else {
|
||||
var fields = {};
|
||||
res.forEach(function (field) {
|
||||
fields[field.Field] = field;
|
||||
});
|
||||
cb(err, fields);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
getIndexes = function (model, cb) {
|
||||
query('SHOW INDEXES FROM ' + model, function(err, res) {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
cb(err);
|
||||
} else {
|
||||
var indexes = {};
|
||||
// Note: this will only show the first key of compound keys
|
||||
res.forEach(function(index) {
|
||||
if (parseInt(index.Seq_in_index, 10) == 1) {
|
||||
indexes[index.Key_name] = index
|
||||
}
|
||||
});
|
||||
cb(err, indexes);
|
||||
query('SHOW INDEXES FROM ' + model, function (err, res) {
|
||||
if (err) {
|
||||
console.log(err);
|
||||
cb(err);
|
||||
} else {
|
||||
var indexes = {};
|
||||
// Note: this will only show the first key of compound keys
|
||||
res.forEach(function (index) {
|
||||
if (parseInt(index.Seq_in_index, 10) == 1) {
|
||||
indexes[index.Key_name] = index
|
||||
}
|
||||
});
|
||||
});
|
||||
cb(err, indexes);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@@ -1,10 +1,10 @@
|
|||
describe('mysql imported features', function() {
|
||||
describe('mysql imported features', function () {
|
||||
|
||||
before(function() {
|
||||
require('./init.js');
|
||||
});
|
||||
before(function () {
|
||||
require('./init.js');
|
||||
});
|
||||
|
||||
require('loopback-datasource-juggler/test/common.batch.js');
|
||||
require('loopback-datasource-juggler/test/include.test.js');
|
||||
require('loopback-datasource-juggler/test/common.batch.js');
|
||||
require('loopback-datasource-juggler/test/include.test.js');
|
||||
|
||||
});
|
||||
|
|
test/init.js (34 lines changed)
|
@@ -4,29 +4,29 @@ var DataSource = require('loopback-datasource-juggler').DataSource;
|
|||
|
||||
var config = require('rc')('loopback', {test: {mysql: {}}}).test.mysql;
|
||||
|
||||
global.getConfig = function(options) {
|
||||
global.getConfig = function (options) {
|
||||
|
||||
var dbConf = {
|
||||
host: config.host || 'localhost',
|
||||
port: config.port || 3306,
|
||||
database: 'myapp_test',
|
||||
username: config.username,
|
||||
password: config.password,
|
||||
createDatabase: true
|
||||
};
|
||||
var dbConf = {
|
||||
host: config.host || 'localhost',
|
||||
port: config.port || 3306,
|
||||
database: 'myapp_test',
|
||||
username: config.username,
|
||||
password: config.password,
|
||||
createDatabase: true
|
||||
};
|
||||
|
||||
if (options) {
|
||||
for (var el in options) {
|
||||
dbConf[el] = options[el];
|
||||
}
|
||||
if (options) {
|
||||
for (var el in options) {
|
||||
dbConf[el] = options[el];
|
||||
}
|
||||
}
|
||||
|
||||
return dbConf;
|
||||
return dbConf;
|
||||
};
|
||||
|
||||
global.getDataSource = global.getSchema = function(options) {
|
||||
var db = new DataSource(require('../'), getConfig(options));
|
||||
return db;
|
||||
global.getDataSource = global.getSchema = function (options) {
|
||||
var db = new DataSource(require('../'), getConfig(options));
|
||||
return db;
|
||||
};
|
||||
|
||||
|
||||
|
|
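A usage note on the helpers above: options passed to getDataSource()/getSchema() are copied over the defaults built from the test.mysql block of the rc configuration, which is how the collation tests earlier in this commit request a specific charset, for example:

var db = getDataSource({collation: 'latin1_general_ci', createDatabase: true});
// db.settings.database stays 'myapp_test'; collation and createDatabase are merged in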
|
@@ -4,426 +4,409 @@ var Schema = require('loopback-datasource-juggler').Schema;
|
|||
|
||||
var db, UserData, StringData, NumberData, DateData;
|
||||
|
||||
describe('migrations', function() {
|
||||
describe('migrations', function () {
|
||||
|
||||
before(setup);
|
||||
before(setup);
|
||||
|
||||
it('should run migration', function(done) {
|
||||
db.automigrate(function(){
|
||||
it('should run migration', function (done) {
|
||||
db.automigrate(function () {
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('UserData should have correct columns', function (done) {
|
||||
getFields('UserData', function (err, fields) {
|
||||
assert.deepEqual(fields, {
|
||||
id: {
|
||||
Field: 'id',
|
||||
Type: 'int(11)',
|
||||
Null: 'NO',
|
||||
Key: 'PRI',
|
||||
Default: null,
|
||||
Extra: 'auto_increment' },
|
||||
email: {
|
||||
Field: 'email',
|
||||
Type: 'varchar(255)',
|
||||
Null: 'NO',
|
||||
Key: 'MUL',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
name: {
|
||||
Field: 'name',
|
||||
Type: 'varchar(255)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
bio: {
|
||||
Field: 'bio',
|
||||
Type: 'text',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
birthDate: {
|
||||
Field: 'birthDate',
|
||||
Type: 'datetime',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
pendingPeriod: {
|
||||
Field: 'pendingPeriod',
|
||||
Type: 'int(11)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
createdByAdmin: {
|
||||
Field: 'createdByAdmin',
|
||||
Type: 'tinyint(1)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' }
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('UserData should have correct indexes', function (done) {
|
||||
// Note: getIndexes truncates multi-key indexes to the first member. Hence index1 is correct.
|
||||
getIndexes('UserData', function (err, fields) {
|
||||
// console.log('....', fields);
|
||||
assert.deepEqual(fields, { PRIMARY: { Table: 'UserData',
|
||||
Non_unique: 0,
|
||||
Key_name: 'PRIMARY',
|
||||
Seq_in_index: 1,
|
||||
Column_name: 'id',
|
||||
Collation: 'A',
|
||||
Cardinality: 0,
|
||||
Sub_part: null,
|
||||
Packed: null,
|
||||
Null: '',
|
||||
Index_type: 'BTREE',
|
||||
Comment: '' },
|
||||
email: { Table: 'UserData',
|
||||
Non_unique: 1,
|
||||
Key_name: 'email',
|
||||
Seq_in_index: 1,
|
||||
Column_name: 'email',
|
||||
Collation: 'A',
|
||||
Cardinality: null,
|
||||
Sub_part: null,
|
||||
Packed: null,
|
||||
Null: '',
|
||||
Index_type: 'BTREE',
|
||||
Comment: '' },
|
||||
index0: { Table: 'UserData',
|
||||
Non_unique: 1,
|
||||
Key_name: 'index0',
|
||||
Seq_in_index: 1,
|
||||
Column_name: 'email',
|
||||
Collation: 'A',
|
||||
Cardinality: null,
|
||||
Sub_part: null,
|
||||
Packed: null,
|
||||
Null: '',
|
||||
Index_type: 'BTREE',
|
||||
Comment: '' }
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('StringData should have correct columns', function (done) {
|
||||
getFields('StringData', function (err, fields) {
|
||||
assert.deepEqual(fields, { id: { Field: 'id',
|
||||
Type: 'int(11)',
|
||||
Null: 'NO',
|
||||
Key: 'PRI',
|
||||
Default: null,
|
||||
Extra: 'auto_increment' },
|
||||
smallString: { Field: 'smallString',
|
||||
Type: 'char(127)',
|
||||
Null: 'NO',
|
||||
Key: 'MUL',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
mediumString: { Field: 'mediumString',
|
||||
Type: 'varchar(255)',
|
||||
Null: 'NO',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
tinyText: { Field: 'tinyText',
|
||||
Type: 'tinytext',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
giantJSON: { Field: 'giantJSON',
|
||||
Type: 'longtext',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
text: { Field: 'text',
|
||||
Type: 'varchar(1024)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' }
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('NumberData should have correct columns', function (done) {
|
||||
getFields('NumberData', function (err, fields) {
|
||||
assert.deepEqual(fields, {
|
||||
id: { Field: 'id',
|
||||
Type: 'int(11)',
|
||||
Null: 'NO',
|
||||
Key: 'PRI',
|
||||
Default: null,
|
||||
Extra: 'auto_increment' },
|
||||
number: { Field: 'number',
|
||||
Type: 'decimal(10,3) unsigned',
|
||||
Null: 'NO',
|
||||
Key: 'MUL',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
tinyInt: { Field: 'tinyInt',
|
||||
Type: 'tinyint(2)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
mediumInt: { Field: 'mediumInt',
|
||||
Type: 'mediumint(8) unsigned',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
floater: { Field: 'floater',
|
||||
Type: 'double(14,6)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' }
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('DateData should have correct columns', function (done) {
|
||||
getFields('DateData', function (err, fields) {
|
||||
assert.deepEqual(fields, {
|
||||
id: { Field: 'id',
|
||||
Type: 'int(11)',
|
||||
Null: 'NO',
|
||||
Key: 'PRI',
|
||||
Default: null,
|
||||
Extra: 'auto_increment' },
|
||||
dateTime: { Field: 'dateTime',
|
||||
Type: 'datetime',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
timestamp: { Field: 'timestamp',
|
||||
Type: 'timestamp',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' }
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should autoupgrade', function (done) {
|
||||
var userExists = function (cb) {
|
||||
query('SELECT * FROM UserData', function (err, res) {
|
||||
cb(!err && res[0].email == 'test@example.com');
|
||||
});
|
||||
}
|
||||
|
||||
UserData.create({email: 'test@example.com'}, function (err, user) {
|
||||
assert.ok(!err, 'Could not create user: ' + err);
|
||||
userExists(function (yep) {
|
||||
assert.ok(yep, 'User does not exist');
|
||||
});
|
||||
UserData.defineProperty('email', { type: String });
|
||||
UserData.defineProperty('name', {type: String, dataType: 'char', limit: 50});
|
||||
UserData.defineProperty('newProperty', {type: Number, unsigned: true, dataType: 'bigInt'});
|
||||
// UserData.defineProperty('pendingPeriod', false); This will not work as expected.
|
||||
db.autoupdate(function (err) {
|
||||
getFields('UserData', function (err, fields) {
|
||||
// change nullable for email
|
||||
assert.equal(fields.email.Null, 'YES', 'Email does not allow null');
|
||||
// change type of name
|
||||
assert.equal(fields.name.Type, 'char(50)', 'Name is not char(50)');
|
||||
// add new column
|
||||
assert.ok(fields.newProperty, 'New column was not added');
|
||||
if (fields.newProperty) {
|
||||
assert.equal(fields.newProperty.Type, 'bigint(20) unsigned', 'New column type is not bigint(20) unsigned');
|
||||
}
|
||||
// drop column - will not happen.
|
||||
// assert.ok(!fields.pendingPeriod, 'Did not drop column pendingPeriod');
|
||||
// user still exists
|
||||
userExists(function (yep) {
|
||||
assert.ok(yep, 'User does not exist');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('UserData should have correct columns', function(done) {
|
||||
getFields('UserData', function(err, fields) {
|
||||
assert.deepEqual(fields, {
|
||||
id: {
|
||||
Field: 'id',
|
||||
Type: 'int(11)',
|
||||
Null: 'NO',
|
||||
Key: 'PRI',
|
||||
Default: null,
|
||||
Extra: 'auto_increment' },
|
||||
email: {
|
||||
Field: 'email',
|
||||
Type: 'varchar(255)',
|
||||
Null: 'NO',
|
||||
Key: 'MUL',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
name: {
|
||||
Field: 'name',
|
||||
Type: 'varchar(255)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
bio: {
|
||||
Field: 'bio',
|
||||
Type: 'text',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
birthDate: {
|
||||
Field: 'birthDate',
|
||||
Type: 'datetime',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
pendingPeriod: {
|
||||
Field: 'pendingPeriod',
|
||||
Type: 'int(11)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
createdByAdmin: {
|
||||
Field: 'createdByAdmin',
|
||||
Type: 'tinyint(1)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' }
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('UserData should have correct indexes', function(done) {
|
||||
// Note: getIndexes truncates multi-key indexes to the first member. Hence index1 is correct.
|
||||
getIndexes('UserData', function(err, fields) {
|
||||
// console.log('....', fields);
|
||||
assert.deepEqual(fields, { PRIMARY:
|
||||
{ Table: 'UserData',
|
||||
Non_unique: 0,
|
||||
Key_name: 'PRIMARY',
|
||||
Seq_in_index: 1,
|
||||
Column_name: 'id',
|
||||
Collation: 'A',
|
||||
Cardinality: 0,
|
||||
Sub_part: null,
|
||||
Packed: null,
|
||||
Null: '',
|
||||
Index_type: 'BTREE',
|
||||
Comment: '' },
|
||||
email:
|
||||
{ Table: 'UserData',
|
||||
Non_unique: 1,
|
||||
Key_name: 'email',
|
||||
Seq_in_index: 1,
|
||||
Column_name: 'email',
|
||||
Collation: 'A',
|
||||
Cardinality: null,
|
||||
Sub_part: null,
|
||||
Packed: null,
|
||||
Null: '',
|
||||
Index_type: 'BTREE',
|
||||
Comment: '' },
|
||||
index0:
|
||||
{ Table: 'UserData',
|
||||
Non_unique: 1,
|
||||
Key_name: 'index0',
|
||||
Seq_in_index: 1,
|
||||
Column_name: 'email',
|
||||
Collation: 'A',
|
||||
Cardinality: null,
|
||||
Sub_part: null,
|
||||
Packed: null,
|
||||
Null: '',
|
||||
Index_type: 'BTREE',
|
||||
Comment: '' }
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('StringData should have correct columns', function(done) {
|
||||
getFields('StringData', function(err, fields) {
|
||||
assert.deepEqual(fields, { id:
|
||||
{ Field: 'id',
|
||||
Type: 'int(11)',
|
||||
Null: 'NO',
|
||||
Key: 'PRI',
|
||||
Default: null,
|
||||
Extra: 'auto_increment' },
|
||||
smallString:
|
||||
{ Field: 'smallString',
|
||||
Type: 'char(127)',
|
||||
Null: 'NO',
|
||||
Key: 'MUL',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
mediumString:
|
||||
{ Field: 'mediumString',
|
||||
Type: 'varchar(255)',
|
||||
Null: 'NO',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
tinyText:
|
||||
{ Field: 'tinyText',
|
||||
Type: 'tinytext',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
giantJSON:
|
||||
{ Field: 'giantJSON',
|
||||
Type: 'longtext',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
text:
|
||||
{ Field: 'text',
|
||||
Type: 'varchar(1024)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' }
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('NumberData should have correct columns', function(done) {
|
||||
getFields('NumberData', function(err, fields) {
|
||||
assert.deepEqual(fields, {
|
||||
id:
|
||||
{ Field: 'id',
|
||||
Type: 'int(11)',
|
||||
Null: 'NO',
|
||||
Key: 'PRI',
|
||||
Default: null,
|
||||
Extra: 'auto_increment' },
|
||||
number:
|
||||
{ Field: 'number',
|
||||
Type: 'decimal(10,3) unsigned',
|
||||
Null: 'NO',
|
||||
Key: 'MUL',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
tinyInt:
|
||||
{ Field: 'tinyInt',
|
||||
Type: 'tinyint(2)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
mediumInt:
|
||||
{ Field: 'mediumInt',
|
||||
Type: 'mediumint(8) unsigned',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' },
|
||||
floater:
|
||||
{ Field: 'floater',
|
||||
Type: 'double(14,6)',
|
||||
Null: 'YES',
|
||||
Key: '',
|
||||
Default: null,
|
||||
Extra: '' }
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||

  it('DateData should have correct columns', function(done) {
    getFields('DateData', function(err, fields) {
      assert.deepEqual(fields, {
        id:
          { Field: 'id',
            Type: 'int(11)',
            Null: 'NO',
            Key: 'PRI',
            Default: null,
            Extra: 'auto_increment' },
        dateTime:
          { Field: 'dateTime',
            Type: 'datetime',
            Null: 'YES',
            Key: '',
            Default: null,
            Extra: '' },
        timestamp:
          { Field: 'timestamp',
            Type: 'timestamp',
            Null: 'YES',
            Key: '',
            Default: null,
            Extra: '' }
      });
      done();
    });
  });

  it('should autoupgrade', function(done) {
    var userExists = function(cb) {
      query('SELECT * FROM UserData', function(err, res) {
        cb(!err && res[0].email == 'test@example.com');
      });
    }

    UserData.create({email: 'test@example.com'}, function(err, user) {
      assert.ok(!err, 'Could not create user: ' + err);
      userExists(function(yep) {
        assert.ok(yep, 'User does not exist');
      });
      UserData.defineProperty('email', { type: String });
      UserData.defineProperty('name', {type: String, dataType: 'char', limit: 50});
      UserData.defineProperty('newProperty', {type: Number, unsigned: true, dataType: 'bigInt'});
      // UserData.defineProperty('pendingPeriod', false); This will not work as expected.
      db.autoupdate( function(err) {
        getFields('UserData', function(err, fields) {
          // change nullable for email
          assert.equal(fields.email.Null, 'YES', 'Email does not allow null');
          // change type of name
          assert.equal(fields.name.Type, 'char(50)', 'Name is not char(50)');
          // add new column
          assert.ok(fields.newProperty, 'New column was not added');
          if (fields.newProperty) {
            assert.equal(fields.newProperty.Type, 'bigint(20) unsigned', 'New column type is not bigint(20) unsigned');
          }
          // drop column - will not happen.
          // assert.ok(!fields.pendingPeriod, 'Did not drop column pendingPeriod');
          // user still exists
          userExists(function(yep) {
            assert.ok(yep, 'User does not exist');
            done();
          });
        });
      });
    });
  });
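
For context, the test above exercises the same autoupdate() flow an application would use against this connector. A minimal sketch, not part of this commit; the connector require, connection settings, and the Customer model are illustrative assumptions:

var DataSource = require('loopback-datasource-juggler').DataSource;
// hypothetical connection settings; fill in host/user/password for a real database
var ds = new DataSource(require('loopback-connector-mysql'), {database: 'mydb'});

var Customer = ds.define('Customer', {
  email: {type: String, index: true}
});

ds.automigrate('Customer', function (err) {        // first run: drop and recreate the table from the model
  Customer.defineProperty('name', {type: String, dataType: 'char', limit: 50});
  ds.autoupdate('Customer', function (err) {       // later runs: ALTER TABLE in place, existing rows are kept
    // the Customer table now also has a CHAR(50) `name` column
  });
});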

  it('should check actuality of dataSource', function (done) {
    // 'drop column'
    UserData.dataSource.isActual(function (err, ok) {
      assert.ok(ok, 'dataSource is not actual (should be)');
      UserData.defineProperty('essay', {type: Schema.Text});
      // UserData.defineProperty('email', false); Can't undefine currently.
      UserData.dataSource.isActual(function (err, ok) {
        assert.ok(!ok, 'dataSource is actual (shouldn\'t be)');
        done();
      });
    });
  });

  it('should allow numbers with decimals', function (done) {
    NumberData.create({number: 1.1234567, tinyInt: 123456, mediumInt: -1234567, floater: 123456789.1234567 }, function (err, obj) {
      assert.ok(!err);
      assert.ok(obj);
      NumberData.findById(obj.id, function (err, found) {
        assert.equal(found.number, 1.123);
        assert.equal(found.tinyInt, 127);
        assert.equal(found.mediumInt, 0);
        assert.equal(found.floater, 99999999.999999);
        done();
      });
    });
  });

  it('should allow both kinds of date columns', function (done) {
    DateData.create({
      dateTime: new Date('Aug 9 1996 07:47:33 GMT'),
      timestamp: new Date('Sep 22 2007 17:12:22 GMT')
    }, function (err, obj) {
      assert.ok(!err);
      assert.ok(obj);
      DateData.findById(obj.id, function (err, found) {
        assert.equal(found.dateTime.toGMTString(), 'Fri, 09 Aug 1996 07:47:33 GMT');
        assert.equal(found.timestamp.toGMTString(), 'Sat, 22 Sep 2007 17:12:22 GMT');
        done();
      });
    });
  });

  it('should disconnect when done', function (done) {
    db.disconnect();
    done();
  });

});

function setup(done) {

  require('./init.js');

  db = getSchema();

  UserData = db.define('UserData', {
    email: { type: String, null: false, index: true },
    name: String,
    bio: Schema.Text,
    birthDate: Date,
    pendingPeriod: Number,
    createdByAdmin: Boolean,
  }, { indexes: {
    index0: {
      columns: 'email, createdByAdmin'
    }
  }
  });

  StringData = db.define('StringData', {
    smallString: {type: String, null: false, index: true, dataType: 'char', limit: 127},
    mediumString: {type: String, null: false, dataType: 'varchar', limit: 255},
    tinyText: {type: String, dataType: 'tinyText'},
    giantJSON: {type: Schema.JSON, dataType: 'longText'},
    text: {type: Schema.Text, dataType: 'varchar', limit: 1024}
  });

  NumberData = db.define('NumberData', {
    number: {type: Number, null: false, index: true, unsigned: true, dataType: 'decimal', precision: 10, scale: 3},
    tinyInt: {type: Number, dataType: 'tinyInt', display: 2},
    mediumInt: {type: Number, dataType: 'mediumInt', unsigned: true},
    floater: {type: Number, dataType: 'double', precision: 14, scale: 6}
  });

  DateData = db.define('DateData', {
    dateTime: {type: Date, dataType: 'datetime'},
    timestamp: {type: Date, dataType: 'timestamp'}
  });

  blankDatabase(db, done);

}

var query = function (sql, cb) {
  db.adapter.query(sql, cb);
};

var blankDatabase = function (db, cb) {
  var dbn = db.settings.database;
  var cs = db.settings.charset;
  var co = db.settings.collation;
  query('DROP DATABASE IF EXISTS ' + dbn, function (err) {
    var q = 'CREATE DATABASE ' + dbn;
    if (cs) {
      q += ' CHARACTER SET ' + cs;
    }
    if (co) {
      q += ' COLLATE ' + co;
    }
    query(q, function (err) {
      query('USE ' + dbn, cb);
    });
  });
};

getFields = function (model, cb) {
  query('SHOW FIELDS FROM ' + model, function (err, res) {
    if (err) {
      cb(err);
    } else {
      var fields = {};
      res.forEach(function (field) {
        fields[field.Field] = field;
      });
      cb(err, fields);
    }
  });
}

getIndexes = function (model, cb) {
  query('SHOW INDEXES FROM ' + model, function (err, res) {
    if (err) {
      console.log(err);
      cb(err);
    } else {
      var indexes = {};
      // Note: this will only show the first key of compound keys
      res.forEach(function (index) {
        if (parseInt(index.Seq_in_index, 10) == 1) {
          indexes[index.Key_name] = index;
        }
      });
      cb(err, indexes);
    }
  });
};
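
For orientation, a minimal sketch of how these helpers back the assertions earlier in the file; it assumes the models from setup() above and that the schema has been migrated first:

db.automigrate(function (err) {
  getIndexes('UserData', function (err, indexes) {
    // only the first column of the compound index0 is reported, per the note above
    assert.equal(indexes.index0.Column_name, 'email');
  });
  getFields('StringData', function (err, fields) {
    // fields is keyed by column name, e.g. the char(127) smallString column
    assert.equal(fields.smallString.Type, 'char(127)');
  });
});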
@@ -5,26 +5,26 @@ var assert = require('assert');
var DataSource = require('loopback-datasource-juggler').DataSource;
var db;

before(function () {
  var config = require('rc')('loopback', {dev: {mysql: {}}}).dev.mysql;
  config.database = 'STRONGLOOP';
  db = new DataSource(require('../'), config);
});

describe('discoverModels', function () {
  describe('Discover models including views', function () {
    it('should return an array of tables and views', function (done) {

      db.discoverModelDefinitions({
        views: true,
        limit: 3
      }, function (err, models) {
        if (err) {
          console.error(err);
          done(err);
        } else {
          var views = false;
          models.forEach(function (m) {
            // console.dir(m);
            if (m.type === 'view') {
              views = true;
@@ -37,19 +37,19 @@ describe('discoverModels', function() {
      });
    });

  describe('Discover models excluding views', function () {
    it('should return an array of only tables', function (done) {

      db.discoverModelDefinitions({
        views: false,
        limit: 3
      }, function (err, models) {
        if (err) {
          console.error(err);
          done(err);
        } else {
          var views = false;
          models.forEach(function (m) {
            // console.dir(m);
            if (m.type === 'view') {
              views = true;
@@ -64,19 +64,19 @@ describe('discoverModels', function() {
      });
    });

describe('Discover models including other users', function () {
  it('should return an array of all tables and views', function (done) {

    db.discoverModelDefinitions({
      all: true,
      limit: 3
    }, function (err, models) {
      if (err) {
        console.error(err);
        done(err);
      } else {
        var others = false;
        models.forEach(function (m) {
          // console.dir(m);
          if (m.owner !== 'STRONGLOOP') {
            others = true;
@@ -89,15 +89,15 @@ describe('Discover models including other users', function() {
    });
  });

describe('Discover model properties', function () {
  describe('Discover a named model', function () {
    it('should return an array of columns for PRODUCT', function (done) {
      db.discoverModelProperties('PRODUCT', function (err, models) {
        if (err) {
          console.error(err);
          done(err);
        } else {
          models.forEach(function (m) {
            // console.dir(m);
            assert(m.tableName === 'PRODUCT');
          });
@@ -110,88 +110,88 @@ describe('Discover model properties', function() {
  });

describe('Discover model primary keys', function () {
  it('should return an array of primary keys for PRODUCT', function (done) {
    db.discoverPrimaryKeys('PRODUCT', function (err, models) {
      if (err) {
        console.error(err);
        done(err);
      } else {
        models.forEach(function (m) {
          // console.dir(m);
          assert(m.tableName === 'PRODUCT');
        });
        done(null, models);
      }
    });
  });

  it('should return an array of primary keys for STRONGLOOP.PRODUCT', function (done) {
    db.discoverPrimaryKeys('PRODUCT', {owner: 'STRONGLOOP'}, function (err, models) {
      if (err) {
        console.error(err);
        done(err);
      } else {
        models.forEach(function (m) {
          // console.dir(m);
          assert(m.tableName === 'PRODUCT');
        });
        done(null, models);
      }
    });
  });
});

describe('Discover model foreign keys', function () {
  it('should return an array of foreign keys for INVENTORY', function (done) {
    db.discoverForeignKeys('INVENTORY', function (err, models) {
      if (err) {
        console.error(err);
        done(err);
      } else {
        models.forEach(function (m) {
          // console.dir(m);
          assert(m.fkTableName === 'INVENTORY');
        });
        done(null, models);
      }
    });
  });

  it('should return an array of foreign keys for STRONGLOOP.INVENTORY', function (done) {
    db.discoverForeignKeys('INVENTORY', {owner: 'STRONGLOOP'}, function (err, models) {
      if (err) {
        console.error(err);
        done(err);
      } else {
        models.forEach(function (m) {
          // console.dir(m);
          assert(m.fkTableName === 'INVENTORY');
        });
        done(null, models);
      }
    });
  });
});

describe('Discover LDL schema from a table', function () {
  it('should return an LDL schema for INVENTORY', function (done) {
    db.discoverSchema('INVENTORY', {owner: 'STRONGLOOP'}, function (err, schema) {
      // console.log('%j', schema);
      assert(schema.name === 'Inventory');
      assert(schema.options.mysql.schema === 'STRONGLOOP');
      assert(schema.options.mysql.table === 'INVENTORY');
      assert(schema.properties.productId);
      assert(schema.properties.productId.type === 'String');
      assert(schema.properties.productId.mysql.columnName === 'PRODUCT_ID');
      assert(schema.properties.locationId);
      assert(schema.properties.locationId.type === 'String');
      assert(schema.properties.locationId.mysql.columnName === 'LOCATION_ID');
      assert(schema.properties.available);
      assert(schema.properties.available.type === 'Number');
      assert(schema.properties.total);
      assert(schema.properties.total.type === 'Number');
      done(null, schema);
    });
  });
});
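
As a point of reference, the discovered LDL schema can be fed straight back into the data source to get a working model over the existing table. A minimal sketch under that assumption, not part of this commit, with error handling reduced to a throw:

db.discoverSchema('INVENTORY', {owner: 'STRONGLOOP'}, function (err, schema) {
  if (err) throw err;
  // schema.name is 'Inventory'; schema.properties holds productId, locationId, available, total (see the asserts above)
  var Inventory = db.define(schema.name, schema.properties, schema.options);
  Inventory.all(function (err, rows) {
    console.log(rows); // rows from STRONGLOOP.INVENTORY, mapped through the discovered schema
  });
});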

describe('Discover and build models', function () {