Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
[fix] aws storage client
- Update the aws-sdk
- Use built-in upload rather than external module
- Expose concurrency/queueSize and partSize configurability
- Add ability to abort upload
  • Loading branch information
jcrugzz committed Dec 21, 2016
commit 82949d81188758ea798cf0e72628cfd22ac603be
39 changes: 25 additions & 14 deletions lib/pkgcloud/amazon/storage/client/files.js
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,19 @@ exports.upload = function (options) {
Key: options.remote instanceof base.File ? options.remote.name : options.remote
};

//
// Allow for configuration of upload parameters
//
var uploadOptions = {};

if (options.concurrency || options.queueSize) {
uploadOptions.queueSize = options.concurrency || options.queueSize;
}

if (options.partSize) {
uploadOptions.partSize = options.partSize;
}

if (options.cacheControl) {
s3Options.CacheControl = options.cacheControl;
}
Expand All @@ -74,25 +87,23 @@ exports.upload = function (options) {
s3Options.ServerSideEncryption = options.ServerSideEncryption;
}

var proxyStream = through(),
writableStream = self.s3Stream.upload(s3Options);
var proxyStream = through();

// we need a proxy stream so we can always return a file model
// via the 'success' event
writableStream.on('uploaded', function(details) {
proxyStream.emit('success', new storage.File(self, details));
});
s3Options.Body = proxyStream;
var upload = this.s3.upload(s3Options, uploadOptions);

writableStream.on('error', function(err) {
proxyStream.emit('error', err);
});
//
// To allow aborting of upload if we want
//
proxyStream.abort = upload.abort.bind(upload);

writableStream.on('data', function (chunk) {
proxyStream.emit('data', chunk);
upload.send(function (err, details) {
if (err) return proxyStream.emit('error', err);
// we need a proxy stream so we can always return a file model
// via the 'success' event
proxyStream.emit('success', new storage.File(self, details));
});

proxyStream.pipe(writableStream);

return proxyStream;
};

Expand Down
4 changes: 0 additions & 4 deletions lib/pkgcloud/amazon/storage/client/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@

var util = require('util'),
AWS = require('aws-sdk'),
s3Stream = require('s3-upload-stream'),
amazon = require('../../client'),
_ = require('lodash');

Expand All @@ -18,9 +17,6 @@ var Client = exports.Client = function (options) {
_.extend(this, require('./files'));

this.s3 = new AWS.S3(this._awsConfig);

// configure the s3Stream
this.s3Stream = s3Stream(this.s3);
};

util.inherits(Client, amazon.Client);
3 changes: 1 addition & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@
],
"dependencies": {
"async": "0.9.x",
"aws-sdk": "^2.2.43",
"aws-sdk": "~2.7.17",
"errs": "0.3.x",
"eventemitter2": "0.4.x",
"fast-json-patch": "0.5.x",
Expand All @@ -67,7 +67,6 @@
"mime": "1.2.x",
"qs": "1.2.x",
"request": "2.40.x",
"s3-upload-stream": "~1.0.7",
"through2": "0.6.x",
"url-join": "0.0.x",
"xml2js": "0.1.x"
Expand Down