loopback-component-storage/lib/storage-handler.js

// Copyright IBM Corp. 2013,2015. All Rights Reserved.
// Node module: loopback-component-storage
// This file is licensed under the Artistic License 2.0.
// License text available at https://opensource.org/licenses/Artistic-2.0
// Globalization
var g = require('strong-globalize')();
var IncomingForm = require('formidable');
var StringDecoder = require('string_decoder').StringDecoder;
var path = require('path');
var defaultOptions = {
maxFileSize: 10 * 1024 * 1024 // 10 MB
};
/**
* Handle multipart/form-data upload to the storage service
* @param {Object} provider The storage service provider
* @param {Request} req The HTTP request
* @param {Response} res The HTTP response
* @param {Object} [options] Upload options (container, getFilename, allowedContentTypes, maxFileSize, acl)
* @callback {Function} cb Callback function
* @header storageService.upload(provider, req, res, options, cb)
*/
exports.upload = function(provider, req, res, options, cb) {
if (!cb && 'function' === typeof options) {
cb = options;
options = {};
}
if (!options.maxFileSize) {
options.maxFileSize = defaultOptions.maxFileSize;
}
var form = new IncomingForm(options);
var container = options.container || req.params.container;
var fields = {}, files = {};
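// Override formidable's default handlePart so that file parts are streamed
// straight to the storage provider instead of being buffered to disk, while
// non-file parts are decoded and collected into `fields`.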
form.handlePart = function(part) {
var self = this;
if (part.filename === undefined) {
var value = '';
var decoder = new StringDecoder(this.encoding);
part.on('data', function(buffer) {
self._fieldsSize += buffer.length;
if (self._fieldsSize > self.maxFieldsSize) {
self._error(new Error(g.f('{{maxFieldsSize}} exceeded, received %s bytes of field data', self._fieldsSize)));
return;
}
value += decoder.write(buffer);
});
part.on('end', function() {
var values = fields[part.name];
if (values === undefined) {
values = [value];
fields[part.name] = values;
} else {
values.push(value);
}
self.emit('field', part.name, value);
});
return;
}
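// A file part: bump formidable's internal _flushing counter so the form's
// 'end' event is held back until the provider upload finishes (it is
// decremented again in endFunc below, followed by _maybeEnd()).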
this._flushing++;
var file = {
container: container,
name: part.filename,
type: part.mime
};
2013-06-27 15:31:03 +00:00
// Options for this file
// Build a filename
if ('function' === typeof options.getFilename) {
file.originalFilename = file.name;
file.name = options.getFilename(file, req, res);
}
// Get allowed mime types
if (options.allowedContentTypes) {
var allowedContentTypes;
if ('function' === typeof options.allowedContentTypes) {
allowedContentTypes = options.allowedContentTypes(file, req, res);
} else {
allowedContentTypes = options.allowedContentTypes;
}
if (Array.isArray(allowedContentTypes) && allowedContentTypes.length !== 0) {
if (allowedContentTypes.indexOf(file.type) === -1) {
self._error(new Error(g.f('{{contentType}} "%s" is not allowed (Must be in [%s])', file.type, allowedContentTypes.join(', '))));
return;
}
}
}
// Get max file size
var maxFileSize;
if (options.maxFileSize) {
if ('function' === typeof options.maxFileSize) {
maxFileSize = options.maxFileSize(file, req, res);
} else {
maxFileSize = options.maxFileSize;
}
}
// Get access control list
if (options.acl) {
if ('function' === typeof options.acl) {
file.acl = options.acl(file, req, res);
} else {
file.acl = options.acl;
}
}
self.emit('fileBegin', part.name, file);
var uploadParams = {
container: container,
remote: file.name,
contentType: file.type
};
if (file.acl) {
uploadParams.acl = file.acl;
}
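// Open a writable upload stream on the storage provider for this part.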
var writer = provider.upload(uploadParams);
writer.on('error', function(err) {
self.emit('error', err);
});
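// Once the provider reports success, record the uploaded file under its
// field name and let formidable finish the form.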
var endFunc = function() {
self._flushing--;
var values = files[part.name];
if (values === undefined) {
values = [file];
files[part.name] = values;
} else {
values.push(file);
}
self.emit('file', part.name, file);
self._maybeEnd();
};
writer.on('success', function(file) {
endFunc();
});
var fileSize = 0;
if (maxFileSize) {
part.on('data', function(buffer) {
fileSize += buffer.length;
file.size = fileSize;
if (fileSize > maxFileSize) {
// We are missing a way to tell the provider to cancel an in-progress upload/multipart upload of the current file:
// - s3-upload-stream doesn't provide a way to do this in its public interface
// - we could delete the file through the provider, but that would not delete multipart data
self._error(new Error(g.f('{{maxFileSize}} exceeded, received %s bytes of file data (max is %s)', fileSize, maxFileSize)));
return;
}
});
}
part.on('end', function() {
writer.end();
});
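// Pipe the part into the provider stream; {end: false} keeps pipe() from
// ending the writer, so it is ended explicitly in the 'end' handler above.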
part.pipe(writer, {end: false});
};
form.parse(req, function(err, _fields, _files) {
if (err) {
console.error(err);
}
cb && cb(err, {files: files, fields: fields});
});
};
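// A hedged usage sketch (not part of this module): assuming `provider` is a
// pkgcloud-style storage client created elsewhere and `app` is an Express app,
// the upload handler can back a route roughly like this:
//
//   app.post('/containers/:container/upload', function(req, res) {
//     require('./storage-handler').upload(provider, req, res, {}, function(err, result) {
//       if (err) return res.status(500).send({error: err.message});
//       res.send(result); // {files: {fieldName: [fileInfo, ...]}, fields: {...}}
//     });
//   });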
function handleError(res, err) {
res.type('application/json');
if (err.code === 'ENOENT') {
res.status(404).send({error: err});
return;
}
res.status(500).send({error: err});
}
/**
* Handle download from a container/file.
* @param {Object} provider The storage service provider
* @param {Request} req The HTTP request
* @param {Response} res The HTTP response
* @param {String} container The container name
* @param {String} file The file name
* @callback {Function} cb Callback function.
* @header storageService.download(provider, req, res, container, file, cb)
*/
exports.download = function(provider, req, res, container, file, cb) {
var fileName = path.basename(file);
var params = {
container: container || req && req.params.container,
remote: file || req && req.params.file
};
var range = null;
if (req) {
if (req.headers) {
range = req.headers.range || '';
}
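// When the client sent a Range header, look up the file size first and serve
// a 206 Partial Content response with a matching Content-Range header.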
if (range) {
provider.getFile(params.container, params.remote, function(err, stats) {
if (err) {
handleError(res, err);
} else {
var total = stats.size;
var parts = range.replace(/bytes=/, '').split('-');
var partialStart = parts[0];
var partialEnd = parts[1];
params.start = parseInt(partialStart, 10);
params.end = partialEnd ? parseInt(partialEnd, 10) : total - 1;
var chunkSize = (params.end - params.start) + 1;
res.status(206);
res.set('Content-Range', 'bytes ' + params.start + '-' + params.end + '/' + total);
res.set('Accept-Ranges', 'bytes');
res.set('Content-Length', chunkSize);
var reader = provider.download(params);
res.type(fileName);
reader.pipe(res);
reader.on('error', function(err) {
handleError(res, err);
});
reader.on('end', function() {
cb();
});
}
});
} else {
var reader = provider.download(params);
res.type(fileName);
reader.pipe(res);
reader.on('error', function(err) {
handleError(res, err);
});
reader.on('end', function() {
cb();
});
}
}
};
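// A hedged usage sketch (not part of this module): serving downloads from an
// Express route, again assuming `provider` is a pkgcloud-style storage client:
//
//   app.get('/containers/:container/download/:file', function(req, res) {
//     require('./storage-handler').download(provider, req, res,
//       req.params.container, req.params.file, function() {
//         // errors are written to `res` by handleError in this module;
//         // the callback fires once streaming completes
//       });
//   });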