Schema support for add/modify

Mark Cavage 2011-08-19 15:08:23 -07:00
parent 3e423e5b1e
commit 11fbda69e7
12 changed files with 829 additions and 125 deletions


@ -130,7 +130,7 @@ function Client(options) {
   this.__defineGetter__('log', function() {
     if (!self._log)
-      self._log = self.log4js.getLogger('LDAPClient');
+      self._log = self.log4js.getLogger('Client');
     return self._log;
   });


@ -53,21 +53,24 @@ module.exports = {
     return new Server(options);
   },
-  dn: dn,
-  DN: dn.DN,
-  RDN: dn.RDN,
-  parseDN: dn.parse,
-  filters: filters,
-  parseFilter: filters.parseString,
   Attribute: Attribute,
   Change: Change,
   Control: Control,
+  DN: dn.DN,
+  RDN: dn.RDN,
+  parseDN: dn.parse,
+  dn: dn,
+  filters: filters,
+  parseFilter: filters.parseString,
   log4js: logStub,
-  url: url,
-  parseURL: url.parse
+  parseURL: url.parse,
+  loadSchema: schema.load,
+  createSchemaAddHandler: schema.createAddHandler,
+  createSchemaModifyHandler: schema.createModifyHandler
 };
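With these exports in place, a consumer can load a schema directory and put the generated handlers in front of its own add/modify logic. The following is a minimal sketch, not part of this commit, assuming the library is consumed as the published `ldapjs` package and that `createServer`, `server.add`, and `server.modify` behave as elsewhere in the codebase:

var ldap = require('ldapjs'); // assumption: published package name

ldap.loadSchema('./schema', function(err, schema) {
  if (err)
    throw err;

  var server = ldap.createServer();
  var schemaAdd = ldap.createSchemaAddHandler({ schema: schema });
  var schemaMod = ldap.createSchemaModifyHandler({ schema: schema });

  // The schema handlers run first and call next(err) on violations, so the
  // "real" handlers only ever see valid, transformed entries.
  server.add('o=example', schemaAdd, function(req, res, next) {
    res.end();
    return next();
  });

  server.modify('o=example', schemaMod, function(req, res, next) {
    res.end();
    return next();
  });

  server.listen(1389);
});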


@ -79,7 +79,8 @@ SearchEntry.prototype.fromObject = function(obj) {
     if (Array.isArray(obj[k])) {
       obj[k].forEach(function(v) {
         if (typeof(v) !== 'string')
-          throw new TypeError(k + ' -> ' + v + ' is not a string');
+          v = '' + v;
         attr.vals.push(v);
       });
     } else if (typeof(obj[k]) === 'string') {


@ -1,106 +0,0 @@
// Copyright 2011 Mark Cavage, Inc. All rights reserved.
var assert = require('assert');
var fs = require('fs');
var Protocol = require('./protocol');
var errors = require('./errors');
///--- API
module.exports = {
/**
*
* Supports definition of objectclasses like so:
* {
* person: {
* required: ['cn', 'sn'],
* optional: ['email']
* }
* }
*/
loadSchema: function(file, callback) {
if (!file || typeof(file) !== 'string')
throw new TypeError('file (string) required');
if (typeof(callback) !== 'function')
throw new TypeError('callback (function) required');
return fs.readFile(file, 'utf8', function(err, data) {
if (err)
return callback(err);
try {
return callback(null, JSON.parse(data));
} catch (e) {
return callback(e);
}
});
},
newInterceptor: function(schema) {
if (typeof(schema) !== 'object')
throw new TypeError('schema (object) required');
// Add/Modify requests already have attributes sorted
return function(req, res, next) {
switch (req.protocolOp) {
case Protocol.LDAP_REQ_ADD:
var ocNdx = req.indexOf('objectclass');
if (ocNdx === -1)
return next(new errors.ConstraintViolation('objectclass'));
var reqOC = req.attributes[ocNdx];
// First make the "set" of required/optional attributes for all OCs in
// the union of all OCs. We destroy these arrays after the fact. Note
// that optional will get the set of attributes also not already in
// required, since we figure this out by destructively changing the
// list of attribute names.
var required = [];
var optional = [];
var i, j;
for (i = 0; i < reqOC.vals.length; i++) {
var oc = schema[reqOC.vals[i]];
if (!oc)
return next(new errors.UndefinedAttributeType(reqOC.vals[i]));
for (j = 0; j < oc.required.length; j++) {
if (required.indexOf(oc.required[j]) === -1)
required.push(oc.required[j]);
}
for (j = 0; j < oc.optional.length; j++) {
if (optional.indexOf(oc.optional[j]) === -1 &&
required.indexOf(oc.optional[j]) === -1)
optional.push(oc.optional[j]);
}
}
// Make a copy of just the attribute names
var attrs = req.attributeNames();
for (i = 0; i < attrs.length; i++) {
var ndx = required.indexOf(attrs[i]);
if (ndx === -1) {
ndx = optional.indexOf(attrs[i]);
if (ndx == -1)
return next(new errors.ConstraintViolation(attrs[i]));
}
attrs.splice(i, 1);
}
if (attrs.length)
return next(new errors.ConstraintViolation(attrs.join()));
break;
case Protocol.LDAP_REQ_MODIFY:
break;
default:
return next();
}
}
}
};

lib/schema/add_handler.js (new file, 115 lines)

@ -0,0 +1,115 @@
// Copyright 2011 Mark Cavage, Inc. All rights reserved.
var assert = require('assert');
var dn = require('../dn');
var errors = require('../errors');
var logStub = require('../log_stub');
var getTransformer = require('./transform').getTransformer;
function createAddHandler(options) {
if (!options || typeof(options) !== 'object')
throw new TypeError('options (object) required');
if (!options.schema || typeof(options.schema) !== 'object')
throw new TypeError('options.schema (object) required');
var log4js = options.log4js || logStub;
var log = log4js.getLogger('SchemaAddHandler');
var schema = options.schema;
if (log.isDebugEnabled())
log.debug('Creating add schema handler with: %s',
JSON.stringify(options.schema, null, 2));
var CVErr = errors.ConstraintViolationError;
var NSAErr = errors.NoSuchAttributeError;
var OCVErr = errors.ObjectclassViolationError;
return function schemaAddHandler(req, res, next) {
var allowed = [];
var attributes = req.toObject().attributes;
var attrNames = Object.keys(attributes);
var i;
var j;
var k;
var key;
if (log.isDebugEnabled())
log.debug('%s running %j against schema', req.logId, attributes);
if (!attributes.objectclass)
return next(new OCVErr('no objectclass'));
for (i = 0; i < attributes.objectclass.length; i++) {
var oc = attributes.objectclass[i].toLowerCase();
if (!schema.objectclasses[oc])
return next(new NSAErr(oc + ' is not a known objectClass'));
// We can check required (must) attributes right here inline. For the may
// attributes we have to build up the complete set across all objectclasses
// first. Also, to make checking much simpler, we just push the musts into
// the allowed (may) list.
var must = schema.objectclasses[oc].must;
for (j = 0; j < must.length; j++) {
if (attrNames.indexOf(must[j]) === -1)
return next(new OCVErr(must[j] + ' is a required attribute'));
if (allowed.indexOf(must[j]) === -1)
allowed.push(must[j]);
}
schema.objectclasses[oc].may.forEach(function(attr) {
if (allowed.indexOf(attr) === -1)
allowed.push(attr);
});
}
// Now check that the entry's attributes are in the allowed list, and go
// ahead and transform the values as appropriate
for (i = 0; i < attrNames.length; i++) {
key = attrNames[i];
if (allowed.indexOf(key) === -1)
return next(new OCVErr(key + ' is not valid for the objectClasses ' +
attributes.objectclass.join()));
var transform = getTransformer(schema, key);
if (transform) {
for (j = 0; j < attributes[key].length; j++) {
try {
attributes[key][j] = transform(attributes[key][j]);
} catch (e) {
log.debug('%s Error parsing %s=%s: %s', req.logId, key,
  attributes[key][j], e.stack);
return next(new CVErr(attrNames[i]));
}
}
for (j = 0; j < req.attributes.length; j++) {
if (req.attributes[j].type === key) {
req.attributes[j].vals = attributes[key];
break;
}
}
}
}
return next();
};
}
module.exports = createAddHandler;
// Now we have a modified attributes object we want to update
// "transparently" in the request.
// if (xformedValues) {
// attrNames.forEach(function(k) {
// for (var i = 0; i < req.attributes.length; i++) {
// if (req.attributes[i].type === k) {
// req.attributes[i].vals = attributes[k];
// return;
// }
// }
// });
// }
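For reference, the add handler only relies on a small slice of the parsed schema: lower-cased objectclass names with their must/may lists, plus attribute entries carrying a syntax OID for value transformation. An illustrative, hand-written fragment (not parser output from any shipped schema file):

var createAddHandler = require('./add_handler');

var schema = {
  objectclasses: {
    person: {
      // Every attribute present on the entry, including objectclass itself,
      // must appear in the union of must + may, so it is listed here.
      must: ['cn', 'objectclass', 'sn'],
      may: ['description', 'telephonenumber']
    }
  },
  attributes: {
    cn:              { names: ['cn'], syntax: '1.3.6.1.4.1.1466.115.121.1.15' },
    telephonenumber: { names: ['telephonenumber'], syntax: '1.3.6.1.4.1.1466.115.121.1.50' }
  }
};

// handler(req, res, next) walks req.toObject().attributes against the
// must/may sets and rewrites values whose syntax has a transformer.
var handler = createAddHandler({ schema: schema });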

lib/schema/index.js (new file, 19 lines)

@ -0,0 +1,19 @@
// Copyright 2011 Mark Cavage, Inc. All rights reserved.
var createAddHandler = require('./add_handler');
var createModifyHandler = require('./mod_handler');
var parser = require('./parser');
///--- API
module.exports = {
createAddHandler: createAddHandler,
createModifyHandler: createModifyHandler,
load: parser.load
};

lib/schema/mod_handler.js (new file, 59 lines)

@ -0,0 +1,59 @@
// Copyright 2011 Mark Cavage, Inc. All rights reserved.
var assert = require('assert');
var dn = require('../dn');
var errors = require('../errors');
var logStub = require('../log_stub');
var getTransformer = require('./transform').getTransformer;
function createModifyHandler(options) {
if (!options || typeof(options) !== 'object')
throw new TypeError('options (object) required');
if (!options.schema || typeof(options.schema) !== 'object')
throw new TypeError('options.schema (object) required');
// TODO add a callback mechanism here so objectclass constraints can be
// enforced
var log4js = options.log4js || logStub;
var log = log4js.getLogger('SchemaModifyHandler');
var schema = options.schema;
var CVErr = errors.ConstraintViolationError;
var NSAErr = errors.NoSuchAttributeError;
var OCVErr = errors.ObjectclassViolationError;
return function schemaModifyHandler(req, res, next) {
if (log.isDebugEnabled())
log.debug('%s running %j against schema', req.logId, req.changes);
for (var i = 0; i < req.changes.length; i++) {
var mod = req.changes[i].modification;
var attribute = schema.attributes[mod.type];
if (!attribute)
return next(new NSAErr(mod.type));
if (!mod.vals || !mod.vals.length)
continue;
var transform = getTransformer(schema, mod.type);
if (transform) {
for (var j = 0; j < mod.vals.length; j++) {
try {
mod.vals[j] = transform(mod.vals[j]);
} catch (e) {
log.debug('%s Error parsing %s: %s', req.logId, mod.vals[j],
e.stack);
return next(new CVErr(mod.type + ': ' + mod.vals[j]));
}
}
}
}
return next();
}
}
module.exports = createModifyHandler;
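To show what the modify handler actually does to a request, here is a small, self-contained sketch. The attribute name and schema fragment are made up for illustration; '1.3.6.1.4.1.1466.115.121.1.27' is the INTEGER syntax the transformer already knows about:

var createModifyHandler = require('./mod_handler');

// hypothetical schema fragment
var schema = {
  attributes: {
    uidnumber: { names: ['uidnumber'], syntax: '1.3.6.1.4.1.1466.115.121.1.27' }
  },
  objectclasses: {}
};

var handler = createModifyHandler({ schema: schema });

// Given a request whose change list contains
//   { operation: 'replace',
//     modification: { type: 'uidnumber', vals: ['1001'] } }
// the handler rewrites vals to [1001] (numbers) before calling next();
// a modification whose type is not in schema.attributes instead gets
// next(new NoSuchAttributeError(type)).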

lib/schema/parser.js (new file, 437 lines)

@ -0,0 +1,437 @@
// Copyright 2011 Mark Cavage, Inc. All rights reserved.
var assert = require('assert');
var fs = require('fs');
var dn = require('../dn');
var errors = require('../errors');
var logStub = require('../log_stub');
//// Attribute BNF
//
// AttributeTypeDescription = "(" whsp
// numericoid whsp ; AttributeType identifier
// [ "NAME" qdescrs ] ; name used in AttributeType
// [ "DESC" qdstring ] ; description
// [ "OBSOLETE" whsp ]
// [ "SUP" woid ] ; derived from this other
// ; AttributeType
// [ "EQUALITY" woid ; Matching Rule name
// [ "ORDERING" woid ; Matching Rule name
// [ "SUBSTR" woid ] ; Matching Rule name
// [ "SYNTAX" whsp noidlen whsp ] ; Syntax OID
// [ "SINGLE-VALUE" whsp ] ; default multi-valued
// [ "COLLECTIVE" whsp ] ; default not collective
// [ "NO-USER-MODIFICATION" whsp ]; default user modifiable
// [ "USAGE" whsp AttributeUsage ]; default userApplications
// whsp ")"
//
// AttributeUsage =
// "userApplications" /
// "directoryOperation" /
// "distributedOperation" / ; DSA-shared
// "dSAOperation" ; DSA-specific, value depends on server
/// Objectclass BNF
//
// ObjectClassDescription = "(" whsp
// numericoid whsp ; ObjectClass identifier
// [ "NAME" qdescrs ]
// [ "DESC" qdstring ]
// [ "OBSOLETE" whsp ]
// [ "SUP" oids ] ; Superior ObjectClasses
// [ ( "ABSTRACT" / "STRUCTURAL" / "AUXILIARY" ) whsp ]
// ; default structural
// [ "MUST" oids ] ; AttributeTypes
// [ "MAY" oids ] ; AttributeTypes
// whsp ")"
// This is some fugly code, and really not that robust, but LDAP schema
// is a pita with its optional ('s. So, whatever, it's good enough for our
// purposes (namely, dropping in the OpenLDAP schema). This took me a little
// over an hour to write, so there you go ;)
function parse(data) {
if (!data || typeof(data) !== 'string')
throw new TypeError('data (string) required');
var lines = [];
data.split('\n').forEach(function(l) {
if (/^#/.test(l) ||
/^objectidentifier/i.test(l) ||
!l.length)
return;
lines.push(l);
});
var attr;
var oc;
var syntax;
var attributes = [];
var objectclasses = [];
var depth = 0;
lines.join('\n').split(/\s+/).forEach(function(w) {
if (attr) {
if (w === '(') {
depth++;
} else if (w === ')') {
if (--depth === 0) {
if (attr._skip)
delete attr._skip;
attributes.push(attr);
attr = null;
}
return;
} else if (!attr.oid) {
attr.oid = w;
} else if (w === 'NAME') {
attr._names = [];
} else if (w === 'DESC') {
attr._desc = '';
} else if (w === 'OBSOLETE') {
attr.obsolete = true;
} else if (w === 'SUP') {
attr._sup = true;
} else if (attr._sup) {
attr.sup = w;
delete attr._sup;
} else if (w === 'EQUALITY') {
attr._equality = true;
} else if (w === 'ORDERING') {
attr._ordering = true;
} else if (w === 'SUBSTR') {
attr._substr = true;
} else if (w === 'SYNTAX') {
attr._syntax = true;
} else if (w === 'SINGLE-VALUE') {
attr.singleValue = true;
} else if (w === 'COLLECTIVE') {
attr.collective = true;
} else if (w === 'NO-USER-MODIFICATION') {
attr.noUserModification = true;
} else if (w === 'USAGE') {
attr._usage = true;
} else if (/^X-/.test(w)) {
attr._skip = true;
} else if (attr._skip) {
// noop
} else if (attr._usage) {
attr.usage = w;
delete attr._usage;
} else if (attr._syntax) {
attr.syntax = w;
delete attr._syntax;
} else if (attr._substr) {
attr.substr = w;
delete attr._substr;
} else if (attr._ordering) {
attr.ordering = w;
delete attr._ordering;
} else if (attr._equality) {
attr.equality = w;
delete attr._equality;
} else if (attr._desc !== undefined) {
attr._desc += w.replace(/\'/g, '');
if (/\'$/.test(w)) {
attr.desc = attr._desc;
delete attr._desc;
} else {
attr._desc += ' ';
}
} else if (attr._names) {
attr._names.push(w.replace(/\'/g, '').toLowerCase());
}
return;
}
if (oc) {
if (w === '(') {
depth++;
} else if (w === ')') {
if (--depth === 0) {
objectclasses.push(oc);
oc = null;
}
return;
} else if (w === '$') {
return;
} else if (!oc.oid) {
oc.oid = w;
} else if (w === 'NAME') {
oc._names = [];
} else if (w === 'DESC') {
oc._desc = '';
} else if (w === 'OBSOLETE') {
oc.obsolete = true;
} else if (w === 'SUP') {
oc._sup = [];
} else if (w === 'ABSTRACT') {
oc['abstract'] = true;
} else if (w === 'AUXILIARY') {
oc.auxiliary = true;
} else if (w === 'STRUCTURAL') {
oc.structural = true;
} else if (w === 'MUST') {
oc._must = [];
} else if (w === 'MAY') {
oc._may = [];
} else if (oc._may) {
oc._may.push(w.toLowerCase());
} else if (oc._must) {
oc._must.push(w.toLowerCase());
} else if (oc._sup) {
oc._sup.push(w.replace(/\'/g, '').toLowerCase());
} else if (oc._desc !== undefined) {
oc._desc += w.replace(/\'/g, '');
if (/\'$/.test(w)) {
oc.desc = oc._desc;
delete oc._desc;
} else {
oc._desc += ' ';
}
} else if (oc._names) {
oc._names.push(w.replace(/\'/g, '').toLowerCase());
}
return;
}
// Throw this away for now.
if (syntax) {
if (w === '(') {
depth++;
} else if (w === ')') {
if (--depth === 0) {
syntax = false;
}
}
return;
}
if (/^attributetype/i.test(w)) {
attr = {};
} else if (/^objectclass/i.test(w)) {
oc = {};
} else if (/^ldapsyntax/i.test(w)) {
syntax = true;
} else if (!w) {
// noop
} else {
throw new Error('Invalid token ' + w);
}
});
// cleanup all the temporary arrays
var i;
for (i = 0; i < attributes.length; i++) {
if (!attributes[i]._names)
continue;
attributes[i].names = attributes[i]._names;
delete attributes[i]._names;
}
for (i = 0; i < objectclasses.length; i++) {
oc = objectclasses[i];
if (oc._names) {
oc.names = oc._names;
delete oc._names;
} else {
oc.names = [];
}
if (oc._sup) {
oc.sup = oc._sup;
delete oc._sup;
} else {
oc.sup = [];
}
if (oc._must) {
oc.must = oc._must;
delete oc._must;
} else {
oc.must = [];
}
if (oc._may) {
oc.may = oc._may;
delete oc._may;
} else {
oc.may = [];
}
}
var _attributes = {};
var _objectclasses = {};
attributes.forEach(function(a) {
for (var i = 0; i < a.names.length; i++) {
a.names[i] = a.names[i].toLowerCase();
_attributes[a.names[i]] = a;
}
});
objectclasses.forEach(function(oc) {
for (var i = 0; i < oc.names.length; i++) {
oc.names[i] = oc.names[i].toLowerCase();
_objectclasses[oc.names[i]] = oc;
}
});
return {
attributes: _attributes,
objectclasses: _objectclasses
};
}
function parseFile(file, callback) {
if (!file || typeof(file) !== 'string')
throw new TypeError('file (string) required');
if (!callback || typeof(callback) !== 'function')
throw new TypeError('callback (function) required');
fs.readFile(file, 'utf8', function(err, data) {
if (err)
return callback(new errors.OperationsError(err.message));
try {
return callback(null, parse(data));
} catch (e) {
return callback(new errors.OperationsError(e.message));
}
});
}
function _merge(child, parent) {
Object.keys(parent).forEach(function(k) {
if (Array.isArray(parent[k])) {
if (k === 'names' || k === 'sup')
return;
if (!child[k])
child[k] = [];
parent[k].forEach(function(v) {
if (child[k].indexOf(v) === -1)
child[k].push(v);
});
} else if (!child[k]) {
child[k] = parent[k];
}
});
return child;
}
function compile(attributes, objectclasses) {
assert.ok(attributes);
assert.ok(objectclasses);
var _attributes = {};
var _objectclasses = {};
Object.keys(attributes).forEach(function(k) {
_attributes[k] = attributes[k];
var sup;
if (attributes[k].sup && (sup = attributes[attributes[k].sup]))
_attributes[k] = _merge(_attributes[k], sup);
_attributes[k].names.sort();
});
Object.keys(objectclasses).forEach(function(k) {
_objectclasses[k] = objectclasses[k];
var sup;
if (objectclasses[k].sup && (sup = objectclasses[objectclasses[k].sup]))
_objectclasses[k] = _merge(_objectclasses[k], sup);
_objectclasses[k].names.sort();
_objectclasses[k].sup.sort();
_objectclasses[k].must.sort();
_objectclasses[k].may.sort();
});
return {
attributes: _attributes,
objectclasses: _objectclasses
};
}
/**
* Loads all the `.schema` files in a directory, and parses them.
*
* This method returns the set of schema from all files, and the "last one"
* wins, so don't do something stupid like have the same attribute defined
* N times with varying definitions.
*
* @param {String} directory the directory of *.schema files to load.
* @param {Function} callback of the form f(err, attributes, objectclasses).
* @throws {TypeError} on bad input.
*/
function load(directory, callback) {
if (!directory || typeof(directory) !== 'string')
throw new TypeError('directory (string) required');
if (!callback || typeof(callback) !== 'function')
throw new TypeError('callback (function) required');
fs.readdir(directory, function(err, files) {
if (err)
return callback(new errors.OperationsError(err.message));
var finished = 0;
var attributes = {};
var objectclasses = {};
files.forEach(function(f) {
if (!/\.schema$/.test(f)) {
++finished;
return;
}
f = directory + '/' + f;
parseFile(f, function(err, schema) {
var cb = callback;
if (err) {
callback = null;
if (cb)
return cb(new errors.OperationsError(err.message));
return;
}
Object.keys(schema.attributes).forEach(function(a) {
attributes[a] = schema.attributes[a];
});
Object.keys(schema.objectclasses).forEach(function(oc) {
objectclasses[oc] = schema.objectclasses[oc];
});
if (++finished === files.length) {
if (cb) {
schema = compile(attributes, objectclasses);
return cb(null, schema);
}
}
});
});
});
}
///--- Exported API
module.exports = {
load: load,
parse: parse,
parseFile: parseFile
};
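A quick way to see the shape the parser produces is to feed parse() a tiny OpenLDAP-style definition matching the BNF comments above. The definitions below are hand-written samples, not taken from any shipped schema file:

var parser = require('./parser');

var text = [
  "attributetype ( 2.5.4.3 NAME ( 'cn' 'commonName' )",
  "  DESC 'RFC2256: common name'",
  "  EQUALITY caseIgnoreMatch",
  "  SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 )",
  "objectclass ( 2.5.6.6 NAME 'person'",
  "  SUP top STRUCTURAL",
  "  MUST ( sn $ cn )",
  "  MAY ( userPassword $ telephoneNumber ) )"
].join('\n');

var schema = parser.parse(text);
// schema.attributes.cn             -> { oid: '2.5.4.3', names: ['cn', 'commonname'],
//                                       desc: 'RFC2256: common name', ... }
// schema.objectclasses.person.must -> ['sn', 'cn']
// schema.objectclasses.person.may  -> ['userpassword', 'telephonenumber']
// (load() additionally runs compile(), which sorts the lists and merges
// known SUP definitions.)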


@ -0,0 +1,37 @@
// Copyright 2011 Mark Cavage, Inc. All rights reserved.
var assert = require('assert');
var dn = require('../dn');
var errors = require('../errors');
var logStub = require('../log_stub');
var getTransformer = require('./transform').getTransformer;
function createSearchHandler(options) {
if (!options || typeof(options) !== 'object')
throw new TypeError('options (object) required');
if (!options.schema || typeof(options.schema) !== 'object')
throw new TypeError('options.schema (object) required');
// TODO add a callback mechanism here so objectclass constraints can be
// enforced
var log4js = options.log4js || logStub;
var log = log4js.getLogger('SchemaSearchHandler');
var schema = options.schema;
var CVErr = errors.ConstraintViolationError;
var NSAErr = errors.NoSuchAttributeError;
var OCVErr = errors.ObjectclassViolationError;
return function schemaSearchHandler(req, res, next) {
if (log.isDebugEnabled())
log.debug('%s running %j against schema', req.logId, req.filter);
return next();
}
}
module.exports = createSearchHandler;

lib/schema/transform.js (new file, 139 lines)

@ -0,0 +1,139 @@
// Copyright 2011 Mark Cavage, Inc. All rights reserved.
var assert = require('assert');
var dn = require('../dn');
///--- API
function _getTransformer(syntax) {
assert.ok(syntax);
// TODO size enforcement
if (/\}$/.test(syntax))
syntax = syntax.replace(/\{.+\}$/, '');
switch (syntax) {
case '1.3.6.1.4.1.1466.115.121.1.27': // int
case '1.3.6.1.4.1.1466.115.121.1.36': // numeric string
return function(value) {
return parseInt(value, 10);
};
case '1.3.6.1.4.1.1466.115.121.1.7': // boolean
return function(value) {
return /^true$/i.test(value);
};
case '1.3.6.1.4.1.1466.115.121.1.5': // binary
return function(value) {
return new Buffer(value).toString('base64');
};
case '1.3.6.1.4.1.1466.115.121.1.12': // dn syntax
return function(value) {
return dn.parse(value).toString();
};
default:
// noop
}
return null;
}
function getTransformer(schema, type) {
assert.ok(schema);
assert.ok(type);
if (!schema.attributes[type] || !schema.attributes[type].syntax)
return null;
return _getTransformer(schema.attributes[type].syntax);
}
function transformValue(schema, type, value) {
assert.ok(schema);
assert.ok(type);
assert.ok(value);
if (!schema.attributes[type] || !schema.attributes[type].syntax)
return value;
var transformer = _getTransformer(schema.attributes[type].syntax);
return transformer ? transformer(value) : null;
}
function transformObject(schema, attributes, keys) {
assert.ok(schema);
assert.ok(attributes);
if (!keys)
keys = Object.keys(attributes);
var xformed = false;
keys.forEach(function(k) {
k = k.toLowerCase();
var transform = _getTransformer(schema.attributes[k].syntax);
if (transform) {
xformed = true;
var vals = attributes[k];
console.log('%s -> %j', k, vals);
for (var i = 0; i < vals.length; i++)
vals[i] = transform(vals[i]);
}
});
return xformed;
}
module.exports = {
transformObject: transformObject,
transformValue: transformValue,
getTransformer: getTransformer
};
// var syntax = schema.attributes[k].syntax;
// if (/\}$/.test(syntax))
// syntax = syntax.replace(/\{.+\}$/, '');
// switch (syntax) {
// case '1.3.6.1.4.1.1466.115.121.1.27': // int
// case '1.3.6.1.4.1.1466.115.121.1.36': // numeric string
// for (j = 0; j < attr.length; j++)
// attr[j] = parseInt(attr[j], 10);
// xformed = true;
// break;
// case '1.3.6.1.4.1.1466.115.121.1.7': // boolean
// for (j = 0; j < attr.length; j++)
// attr[j] = /^true$/i.test(attr[j]);
// xformed = true;
// break;
// case '1.3.6.1.4.1.1466.115.121.1.5': // binary
// for (j = 0; j < attr.length; j++)
// attr[j] = new Buffer(attr[j]).toString('base64');
// xformed = true;
// break;
// case '1.3.6.1.4.1.1466.115.121.1.12': // dn syntax
// for (j = 0; j < attr.length; j++)
// attr[j] = dn.parse(attr[j]).toString();
// xformed = true;
// break;
// default:
// // noop
// }
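The transformers are keyed purely by syntax OID (with any {len} suffix stripped), so they are easy to poke at in isolation. A hand-written schema fragment for illustration:

var getTransformer = require('./transform').getTransformer;

var schema = {
  attributes: {
    uidnumber: { syntax: '1.3.6.1.4.1.1466.115.121.1.27' },    // INTEGER
    member:    { syntax: '1.3.6.1.4.1.1466.115.121.1.12{64}' } // DN, with a length bound
  }
};

var toInt = getTransformer(schema, 'uidnumber');
toInt('1001');                 // -> 1001 (a number)

var toDN = getTransformer(schema, 'member');
toDN('cn=foo, o=example');     // -> dn.parse(...).toString(), a normalized DN string

getTransformer(schema, 'cn');  // -> null: unknown attribute, or no transformer for its syntax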


@ -205,7 +205,7 @@ function Server(options) {
   EventEmitter.call(this, options);
-  this.log = options.log4js.getLogger('LDAPServer');
+  this.log = options.log4js.getLogger('Server');
   var log = this.log;
   function setupConnection(c) {


@ -13,15 +13,15 @@ var url;
 ///--- Tests
 test('load library', function(t) {
-  url = require('../lib/index').url;
-  t.ok(url);
+  parseURL = require('../lib/index').parseURL;
+  t.ok(parseURL);
   t.end();
 });
 test('parse empty', function(t) {
-  var u = url.parse('ldap:///');
+  var u = parseURL('ldap:///');
   t.equal(u.hostname, 'localhost');
   t.equal(u.port, 389);
   t.ok(!u.DN);
@ -32,7 +32,7 @@ test('parse empty', function(t) {
 test('parse hostname', function(t) {
-  var u = url.parse('ldap://example.com/');
+  var u = parseURL('ldap://example.com/');
   t.equal(u.hostname, 'example.com');
   t.equal(u.port, 389);
   t.ok(!u.DN);
@ -43,7 +43,7 @@ test('parse hostname', function(t) {
 test('parse host and port', function(t) {
-  var u = url.parse('ldap://example.com:1389/');
+  var u = parseURL('ldap://example.com:1389/');
   t.equal(u.hostname, 'example.com');
   t.equal(u.port, 1389);
   t.ok(!u.DN);
@ -55,7 +55,7 @@ test('parse host and port', function(t) {
 test('parse full', function(t) {
-  var u = url.parse('ldaps://ldap.example.com:1389/dc=example%20,dc=com' +
+  var u = parseURL('ldaps://ldap.example.com:1389/dc=example%20,dc=com' +
       '?cn,sn?sub?(cn=Babs%20Jensen)');
   t.equal(u.secure, true);