
Commit f759e4d

Merge pull request #4248 from christek91/optimize-large-file-uploads
Optimize large file uploads
2 parents: 82782a3 + daa80ad

File tree

1 file changed: 15 additions, 7 deletions


notebook/static/tree/js/notebooklist.js

Lines changed: 15 additions & 7 deletions
@@ -1314,7 +1314,8 @@ define([
 
     var parse_large_file = function (f, item) {
         // codes inspired by https://stackoverflow.com/a/28318964
-        var chunk_size = 1024 * 1024;
+        // 8MB chunk size chosen to match chunk sizes used by benchmark reference (AWS S3)
+        var chunk_size = 1024 * 1024 * 8;
         var offset = 0;
         var chunk = 0;
         var chunk_reader = null;
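For context, chunk_size drives a read-and-upload loop: the file is sliced into fixed-size pieces and each piece is read with a FileReader before the next one is requested, so raising the slice size from 1MB to 8MB cuts the number of upload requests per file by 8x. A minimal standalone sketch of that pattern follows; it is not the notebook code itself, and upload_chunk is a hypothetical stand-in for the request that sends one piece to the server.

// Minimal sketch of the chunked-read pattern, assuming a File `f`.
// `upload_chunk` is a hypothetical placeholder for the per-chunk upload.
var read_in_chunks = function (f, upload_chunk) {
    var chunk_size = 1024 * 1024 * 8;  // 8MB, matching the new default
    var offset = 0;
    var chunk = 0;

    var read_next = function () {
        var reader = new FileReader();
        var blob = f.slice(offset, offset + chunk_size);
        reader.onload = function (event) {
            chunk += 1;
            offset += chunk_size;
            upload_chunk(event.target.result, chunk);  // ArrayBuffer for this piece
            if (offset < f.size) {
                read_next();  // only request the next slice once this one is done
            }
        };
        reader.readAsArrayBuffer(blob);
    };
    read_next();
};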
@@ -1365,18 +1366,25 @@ define([
         reader.onerror = on_error;
     };
 
+    // This approach avoids triggering multiple GC pauses for large files.
+    // Borrowed from kanaka's answer at:
+    // https://stackoverflow.com/questions/12710001/how-to-convert-uint8-array-to-base64-encoded-string
+    var Uint8ToString = function(u8a){
+        var CHUNK_SZ = 0x8000;
+        var c = [];
+        for (var i=0; i < u8a.length; i+=CHUNK_SZ) {
+            c.push(String.fromCharCode.apply(null, u8a.subarray(i, i+CHUNK_SZ)));
+        }
+        return c.join("");
+    };
+
     // These codes to upload file in original class
     var upload_file = function(item, chunk) {
         var filedata = item.data('filedata');
         if (filedata instanceof ArrayBuffer) {
             // base64-encode binary file data
-            var bytes = '';
             var buf = new Uint8Array(filedata);
-            var nbytes = buf.byteLength;
-            for (var i=0; i<nbytes; i++) {
-                bytes += String.fromCharCode(buf[i]);
-            }
-            filedata = btoa(bytes);
+            filedata = btoa(Uint8ToString(buf));
             format = 'base64';
         }
         var model = { name: filename, path: path };
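The replaced loop appended one character per byte to a growing string, allocating a new intermediate string on every iteration, which is what triggered the GC pauses mentioned in the committed comment. The new Uint8ToString converts 0x8000 bytes per String.fromCharCode.apply call (staying under engines' argument-count limits for apply) and joins the pieces once at the end. Below is a standalone sketch of the same technique with a small usage example against btoa; the name uint8_to_string here is illustrative.

// Same technique as the committed Uint8ToString helper, standalone.
// fromCharCode.apply spreads the subarray as arguments, so the chunk
// size must stay below engine limits on apply()'s argument count.
var uint8_to_string = function (u8a) {
    var CHUNK_SZ = 0x8000;
    var parts = [];
    for (var i = 0; i < u8a.length; i += CHUNK_SZ) {
        parts.push(String.fromCharCode.apply(null, u8a.subarray(i, i + CHUNK_SZ)));
    }
    return parts.join("");
};

// Usage: base64-encode binary data the same way upload_file now does.
var buf = new Uint8Array([72, 101, 108, 108, 111]);  // bytes for "Hello"
console.log(btoa(uint8_to_string(buf)));             // prints "SGVsbG8="

One join over a few hundred 32KB strings is far cheaper than millions of single-character concatenations, which is where the savings come from on an 8MB chunk.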
