Skip to content

Commit

Permalink
storage: disable chunked transfer encoding if contentLength is set
Browse files — browse the repository at this point in the history
  • Loading branch information
stephenplusplus committed Sep 8, 2015
1 parent e77ab5f commit c04330c
Show file tree
Hide file tree
Showing 4 changed files with 52 additions and 1 deletion.
29 changes: 28 additions & 1 deletion lib/common/util.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
*/

var extend = require('extend');
var format = require('string-format-obj');
var googleAuth = require('google-auto-auth');
var is = require('is');
var nodeutil = require('util');
Expand Down Expand Up @@ -226,6 +227,7 @@ function makeWritableStream(dup, options, onComplete) {
};

var metadata = options.metadata || {};
var chunked = is.undefined(metadata.contentLength);

var reqOpts = extend(true, defaultReqOpts, options.request, {
multipart: [
Expand All @@ -247,7 +249,7 @@ function makeWritableStream(dup, options, onComplete) {
return;
}

request(authorizedReqOpts, function(err, resp, body) {
var requestStream = request(authorizedReqOpts, function(err, resp, body) {
util.handleResp(err, resp, body, function(err, data) {
if (err) {
dup.destroy(err);
Expand All @@ -257,6 +259,31 @@ function makeWritableStream(dup, options, onComplete) {
onComplete(data);
});
});

if (!chunked) {
// Total the length of all of the multiparts. This has to be done
// manually as it is not exposed by request, but is required for a non-
// chunked upload.
var parts = {
boundary: '86e04d4f-2700-4082-a77e-0c4395e20ee0', // uuid()
separator: '\r\n'
};

var contentLength = reqOpts.multipart.reduce(function(sum, part) {
return new Buffer(format('{bound}{sep}{part}{sep}{sep}{body}{sep}', {
bound: '--' + parts.boundary,
sep: parts.separator,
part: 'Content-Type: ' + part['Content-Type'],
body: part.body === writeStream ? '' : part.body
})).length + sum;
}, 0);

contentLength += new Buffer('--' + parts.boundary + '--').length;
contentLength += metadata.contentLength;

requestStream.headers['Content-Length'] = contentLength;
delete requestStream.headers['transfer-encoding'];
}
}
});
}
Expand Down
1 change: 1 addition & 0 deletions lib/storage/bucket.js
Original file line number Diff line number Diff line change
Expand Up @@ -996,6 +996,7 @@ Bucket.prototype.upload = function(localPath, options, callback) {
}

resumable = fd.size > RESUMABLE_THRESHOLD;
metadata.contentLength = fd.size;

upload();
});
Expand Down
3 changes: 3 additions & 0 deletions lib/storage/file.js
Original file line number Diff line number Diff line change
Expand Up @@ -582,6 +582,9 @@ File.prototype.createReadStream = function(options) {
* @param {boolean} options.gzip - Automatically gzip the file. This will set
* `options.metadata.contentEncoding` to `gzip`.
* @param {object} options.metadata - Set the metadata for this file.
* @param {number} options.metadata.contentLength - Set this value to disable
* chunked transfer encoding. This only works with simple, non-resumable
* uploads.
* @param {boolean} options.resumable - Force a resumable upload. NOTE: When
* working with streams, the file format and size is unknown until it's
* completely consumed. Because of this, it's best for you to be explicit
Expand Down
20 changes: 20 additions & 0 deletions system-test/storage.js
Original file line number Diff line number Diff line change
Expand Up @@ -589,6 +589,26 @@ describe('storage', function() {
});
});

it('should write without chunked transfer-encoding', function(done) {
  var file = bucket.file('LargeFile');

  // Stat the fixture first so we can pass its exact byte count as
  // metadata.contentLength, which switches the upload off chunked
  // transfer-encoding (see lib/common/util.js).
  fs.stat(files.big.path, function(err, metadata) {
    // Fail fast with the real error; without this check a stat failure
    // surfaces as a confusing TypeError on `metadata.size` below.
    assert.ifError(err);

    var ws = file.createWriteStream({
      metadata: {
        contentLength: metadata.size
      }
    });

    fs.createReadStream(files.big.path)
      .pipe(ws)
      .on('error', done)
      .on('finish', function() {
        // Loose equality on purpose: the API may report `size` as a
        // string while fs.stat gives a number — TODO confirm.
        assert.equal(file.metadata.size, metadata.size);
        file.delete(done);
      });
  });
});

it('should write metadata', function(done) {
var options = {
metadata: { contentType: 'image/png' },
Expand Down

0 comments on commit c04330c

Please sign in to comment.