@@ -1314,7 +1314,8 @@ define([
1314
1314
1315
1315
var parse_large_file = function ( f , item ) {
1316
1316
// codes inspired by https://stackoverflow.com/a/28318964
1317
- var chunk_size = 1024 * 1024 ;
1317
+ // 8MB chunk size chosen to match chunk sizes used by benchmark reference (AWS S3)
1318
+ var chunk_size = 1024 * 1024 * 8 ;
1318
1319
var offset = 0 ;
1319
1320
var chunk = 0 ;
1320
1321
var chunk_reader = null ;
@@ -1365,18 +1366,25 @@ define([
1365
1366
reader . onerror = on_error ;
1366
1367
} ;
1367
1368
1369
+ // This approach avoids triggering multiple GC pauses for large files.
1370
+ // Borrowed from kanaka's answer at:
1371
+ // https://stackoverflow.com/questions/12710001/how-to-convert-uint8-array-to-base64-encoded-string
1372
// Convert a Uint8Array to a "binary string" (one UTF-16 code unit per byte),
// suitable for passing to btoa().
// The array is consumed in 32K-element slices because Function.prototype.apply
// spreads its argument array onto the call stack — a single apply over a large
// buffer would overflow it. Joining per-slice strings at the end also avoids
// building one huge string through repeated concatenation.
// Borrowed from kanaka's answer at:
// https://stackoverflow.com/questions/12710001/how-to-convert-uint8-array-to-base64-encoded-string
var Uint8ToString = function (u8a) {
    var SLICE_LEN = 0x8000;
    var parts = [];
    var pos = 0;
    while (pos < u8a.length) {
        parts.push(String.fromCharCode.apply(null, u8a.subarray(pos, pos + SLICE_LEN)));
        pos += SLICE_LEN;
    }
    return parts.join("");
};
1380
+
1368
1381
// These codes to upload file in original class
1369
1382
var upload_file = function ( item , chunk ) {
1370
1383
var filedata = item . data ( 'filedata' ) ;
1371
1384
if ( filedata instanceof ArrayBuffer ) {
1372
1385
// base64-encode binary file data
1373
- var bytes = '' ;
1374
1386
var buf = new Uint8Array ( filedata ) ;
1375
- var nbytes = buf . byteLength ;
1376
- for ( var i = 0 ; i < nbytes ; i ++ ) {
1377
- bytes += String . fromCharCode ( buf [ i ] ) ;
1378
- }
1379
- filedata = btoa ( bytes ) ;
1387
+ filedata = btoa ( Uint8ToString ( buf ) ) ;
1380
1388
format = 'base64' ;
1381
1389
}
1382
1390
var model = { name : filename , path : path } ;
0 commit comments