diff --git a/CHANGELOG.md b/CHANGELOG.md
index 747c858..ca145f5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,10 @@
 Changelog
 =========
 
+#### 1.0.1 (2014-09-26)
+
+Fixed error in usage in the documentation and examples. The examples did not use the "new" keyword when creating the upload stream, so there were scope issues when doing parallel uploads. This has been clarified and corrected in the documentation and examples.
+
 #### 1.0.0 (2014-09-15)
 
 Major overhaul of the functional interface. Breaks compatability with older versions of the module in favor of a cleaner, more streamlined approach. A migration guide for users of older versions of the module has been included in the documentation.
diff --git a/README.md b/README.md
index abd0861..0facaba 100644
--- a/README.md
+++ b/README.md
@@ -6,13 +6,13 @@ A pipeable write stream which uploads to Amazon S3 using the multipart file uplo
 
 ### Changelog
 
-#### 1.0.0 (2014-09-15)
+#### 1.0.1 (2014-09-26)
 
-Major overhaul of the functional interface. Breaks compatability with older versions of the module in favor of a cleaner, more streamlined approach. A migration guide for users of older versions of the module has been included in the documentation.
+Fixed error in usage in the documentation and examples. The examples did not use the "new" keyword when creating the upload stream, so there were scope issues when doing parallel uploads. This has been clarified and corrected in the documentation and examples.
 
-#### 0.6.2 (2014-08-31)
+#### 1.0.0 (2014-09-15)
 
-Upgrading the AWS SDK dependency to the latest version. Fixes issue #11
+Major overhaul of the functional interface. Breaks compatability with older versions of the module in favor of a cleaner, more streamlined approach. A migration guide for users of older versions of the module has been included in the documentation.
 
 [Historical Changelogs](CHANGELOG.md)
 
@@ -44,7 +44,7 @@ s3Stream.client(new AWS.S3());
 // Create the streams
 var read = fs.createReadStream('/path/to/a/file');
 var compress = zlib.createGzip();
-var upload = s3Stream.upload({
+var upload = new s3Stream.upload({
   "Bucket": "bucket-name",
   "Key": "key-name"
 });
@@ -118,7 +118,7 @@ var s3Stream = require('s3-upload-stream'),
 s3Stream.client(new AWS.S3());
 
 var read = fs.createReadStream('/path/to/a/file');
-var upload = s3Client.upload({
+var upload = new s3Client.upload({
   "Bucket": "bucket-name",
   "Key": "key-name",
   "ACL": "public-read",
@@ -144,7 +144,7 @@ var s3Stream = require('s3-upload-stream'),
 s3Stream.client(new AWS.S3());
 
 var read = fs.createReadStream('/path/to/a/file');
-var upload = s3Client.upload({
+var upload = new s3Client.upload({
   "Bucket": "bucket-name",
   "Key": "key-name"
 });
@@ -167,7 +167,7 @@ var s3Stream = require('s3-upload-stream'),
 s3Stream.client(new AWS.S3());
 
 var read = fs.createReadStream('/path/to/a/file');
-var upload = s3Client.upload({
+var upload = new s3Client.upload({
   "Bucket": "bucket-name",
   "Key": "key-name"
 });
diff --git a/examples/upload.js b/examples/upload.js
index e644f6f..512f117 100755
--- a/examples/upload.js
+++ b/examples/upload.js
@@ -13,7 +13,7 @@ s3Stream.client(new AWS.S3());
 // Create the streams
 var read = fs.createReadStream('../lib/s3-upload-stream.js');
 var compress = zlib.createGzip();
-var upload = s3Stream.upload({
+var upload = new s3Stream.upload({
   "Bucket": "bucket-name",
   "Key": "key-name"
 });
diff --git a/lib/s3-upload-stream.js b/lib/s3-upload-stream.js
index 8623692..ba741d8 100644
--- a/lib/s3-upload-stream.js
+++ b/lib/s3-upload-stream.js
@@ -15,7 +15,7 @@ module.exports = {
     var e = new events.EventEmitter();
 
     // Create the writeable stream interface.
-    self.ws = Writable({
+    self.ws = new Writable({
      highWaterMark: 4194304 // 4 MB
     });
 
@@ -210,8 +210,11 @@
       function (err, result) {
         if (err)
           self.abortUpload('Failed to complete the multipart upload on S3: ' + JSON.stringify(err));
-        else
+        else {
+          // Emit both events for backwards compatability, and to follow the spec.
           self.ws.emit('uploaded', result);
+          self.ws.emit('finished', result);
+        }
       }
     );
   };
diff --git a/package.json b/package.json
index 68e54f3..111e3eb 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
 {
   "name": "s3-upload-stream",
   "description": "Writeable stream for uploading content of unknown size to S3 via the multipart API.",
-  "version": "1.0.0",
+  "version": "1.0.1",
   "author": {
     "name": "Nathan Peck",
     "email": "nathan@storydesk.com"
diff --git a/tests/test.js b/tests/test.js
index 33cb248..ac82d18 100644
--- a/tests/test.js
+++ b/tests/test.js
@@ -114,7 +114,7 @@ describe('Creating upload stream', function () {
 
     it('should throw an error', function (done) {
       try {
-        uploadStream = s3Stream.upload({
+        uploadStream = new s3Stream.upload({
          "Bucket": "test-bucket-name",
          "Key": "test-file-name"
         });
@@ -133,7 +133,7 @@ describe('Creating upload stream', function () {
     before(function (done) {
       s3Stream.client(new AWSstub.S3());
 
-      uploadStream = s3Stream.upload({
+      uploadStream = new s3Stream.upload({
        "Bucket": "test-bucket-name",
        "Key": "test-file-name"
       });
@@ -155,7 +155,7 @@ describe('Stream Methods', function () {
   var uploadStream;
 
   before(function (done) {
-    uploadStream = s3Stream.upload({
+    uploadStream = new s3Stream.upload({
      "Bucket": "test-bucket-name",
      "Key": "test-file-name"
     });
@@ -208,7 +208,7 @@ describe('Piping data into the writable upload stream', function () {
   var uploadStream;
 
   before(function (done) {
-    uploadStream = s3Stream.upload({
+    uploadStream = new s3Stream.upload({
      "Bucket": "test-bucket-name",
      "Key": "test-file-name"
     });
@@ -264,7 +264,7 @@ describe('Piping data into the writable upload stream', function () {
 describe('S3 Error catching', function () {
   describe('Error creating multipart upload', function () {
     it('should emit an error', function (done) {
-      var uploadStream = s3Stream.upload({
+      var uploadStream = new s3Stream.upload({
        "Bucket": "test-bucket-name",
        "Key": "create-fail"
       });
@@ -279,7 +279,7 @@ describe('S3 Error catching', function () {
     var uploadStream;
 
     before(function (done) {
-      uploadStream = s3Stream.upload({
+      uploadStream = new s3Stream.upload({
        "Bucket": "test-bucket-name",
        "Key": "upload-fail"
       });
@@ -308,7 +308,7 @@ describe('S3 Error catching', function () {
     var uploadStream;
 
     before(function (done) {
-      uploadStream = s3Stream.upload({
+      uploadStream = new s3Stream.upload({
        "Bucket": "test-bucket-name",
        "Key": "complete-fail"
       });
@@ -337,7 +337,7 @@ describe('S3 Error catching', function () {
     var uploadStream;
 
     before(function (done) {
-      uploadStream = s3Stream.upload({
+      uploadStream = new s3Stream.upload({
        "Bucket": "test-bucket-name",
        "Key": "abort-fail"
       });
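
For reference, a minimal usage sketch (not part of the patch above) of the corrected 1.0.1 behaviour: each upload stream is constructed with the "new" keyword so parallel uploads keep independent state, and completion is observed via the 'uploaded' event, which this patch now accompanies with 'finished'. The bucket name and file names below are placeholders.

// Minimal sketch of the corrected 1.0.1 usage; bucket and file names are placeholders.
var AWS      = require('aws-sdk'),
    fs       = require('fs'),
    s3Stream = require('s3-upload-stream');

s3Stream.client(new AWS.S3());

['first-file.txt', 'second-file.txt'].forEach(function (name) {
  // Each "new s3Stream.upload(...)" call returns an independent writable stream,
  // so parallel uploads no longer collide on shared state.
  var upload = new s3Stream.upload({
    "Bucket": "bucket-name",
    "Key": name
  });

  // As of this patch the stream emits both 'uploaded' and 'finished' on completion.
  upload.on('uploaded', function (details) {
    console.log('Finished uploading ' + name, details);
  });

  upload.on('error', function (err) {
    console.error('Upload of ' + name + ' failed:', err);
  });

  fs.createReadStream(name).pipe(upload);
});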