Skip to content

Instantly share code, notes, and snippets.

@rhamedy
Last active January 30, 2021 23:34
Show Gist options
  • Save rhamedy/9607ce395f3cb1b758eb to your computer and use it in GitHub Desktop.
Save rhamedy/9607ce395f3cb1b758eb to your computer and use it in GitHub Desktop.
Save the file that is uploaded via Resumable.js in Express App

I created this gist from here https://github.com/23/resumable.js in order to modify the node.js sample for the purpose of allowing the uploaded file to be saved locally.

These instructions show how to reproduce the working answer to the following Stack Overflow question: http://stackoverflow.com/questions/33288192/resumable-js-cant-save-file-to-server-side-directory

  1. Create a directory (i.e. resumable-app)
  2. copy/paste the app.js (at least), resumable.js and resumable-node.js from this gist into the directory (you can copy resumable.js and resumable-node.js from the official GitHub repository if you want to; no change has been made to them)
  3. copy/paste the package.json and do a npm install to get the modules (or manually npm install these express, path, fs, connect-multiparty)
  4. create uploads directory in the main app directory and make sure it is writable
  5. Copy/paste the public directory from https://github.com/23/resumable.js/tree/master/samples/Node.js — it contains the icons (.png files), a style.css and an index.html for uploading
  6. Change the target value in the line 49 or 50 of index.html in the public directory you just copied to your servers address in my case http://localhost:3000/upload
  7. You can tweak the chunk size, the number of simultaneous chunk uploads, etc. in index.html. I left them mostly at their defaults (the one value I did change was from 3 to 4)
  8. Done all the above, then you are good to go.
  9. Run the app i.e. nodemon app.js or node app.js
  10. Navigate to your server address or http://localhost:3000/upload

Upload a file and it should end up in the uploads directory.

// app.js — minimal Express server that accepts chunked uploads from
// Resumable.js and reassembles them into ./uploads.
var express = require("express");
var path = require("path");
// fs provides the write stream used to stitch the chunks back together.
var fs = require("fs");
// resumable-node.js exports a factory: pass it the directory where the chunk
// files should be stored. "uploads" must exist in the app folder and be
// writable, and resumable-node.js must sit next to app.js.
var resumable = require('./resumable-node.js')(__dirname + "/uploads");
var app = express();
var multipart = require('connect-multiparty');
// Serve the demo front-end (index.html, style.css, icons) from ./public.
app.use(express.static(path.join(__dirname, '/public')));
// connect-multiparty parses the multipart chunk POSTs into req.body/req.files.
app.use(multipart());
// Handle uploads through Resumable.js.
// Each POST carries one chunk; resumable-node reports 'partly_done' until the
// final chunk arrives, at which point status becomes 'done'.
app.post('/upload', function (req, res) {
  resumable.post(req, function (status, filename, original_filename, identifier) {
    if (status === 'done') {
      // All chunks are on disk: stitch them back together into ./uploads.
      var stream = fs.createWriteStream('./uploads/' + filename);
      stream.on('error', function (err) {
        // Write streams emit 'error'/'finish' (not the 'data'/'end' events the
        // original listened for) — surface failures instead of ignoring them.
        console.error('Failed to write ' + filename, err);
      });
      resumable.write(identifier, stream, {
        // Delete the chunk files only after the whole file has been piped out.
        // The original called clean() immediately, racing the async write and
        // risking a truncated output file.
        onDone: function () {
          resumable.clean(identifier);
        }
      });
    }
    // NOTE: set an 'Access-Control-Allow-Origin: *' header here to enable
    // cross-domain requests. (The original's res.send(status, {...}) used a
    // deprecated Express signature and never set headers anyway.)
    res.send(status);
  });
});
// Handle status checks on chunks through Resumable.js.
// The client probes each chunk with a GET: 200 means "already uploaded, skip
// it", 404 means "missing, upload it".
app.get('/upload', function (req, res) {
  resumable.get(req, function (status, filename, original_filename, identifier) {
    console.log('GET', status);
    // res.send(status, body) is deprecated since Express 4 — set the HTTP
    // status code explicitly instead.
    res.status(status == 'found' ? 200 : 404).send(status);
  });
});
// Stream an uploaded file back to the client by its resumable identifier,
// concatenating its stored chunks straight into the HTTP response.
app.get('/download/:identifier', function (req, res) {
  var identifier = req.params.identifier;
  resumable.write(identifier, res);
});
// Serve the Resumable.js client library itself.
app.get('/resumable.js', function (req, res) {
  res.setHeader("content-type", "application/javascript");
  // fs is already required at the top of app.js — the original re-required it
  // here. Resolve the file relative to this script (not the process cwd) so
  // the route works no matter where the app was started from; resumable.js
  // must sit next to app.js.
  fs.createReadStream(path.join(__dirname, "resumable.js")).pipe(res);
});
// Boot the HTTP server.
var port = 3000;
app.listen(port, function () {
  console.log('Example app listening on port ' + port + '!');
});
{
  "name": "resumable-example",
  "version": "1.0.0",
  "description": "Sample Express App to demonstrate Resumable.js",
  "main": "app.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [
    "Resumable.js",
    "upload",
    "express",
    "nodejs"
  ],
  "author": "Raf",
  "dependencies": {
    "connect-multiparty": "^2.0.0",
    "express": "^4.13.4"
  }
}
//copied as-is from resumable github
var fs = require('fs'), path = require('path'), util = require('util'), Stream = require('stream').Stream;
module.exports = resumable = function(temporaryFolder){
var $ = this;
$.temporaryFolder = temporaryFolder;
$.maxFileSize = null;
$.fileParameterName = 'file';
try {
fs.mkdirSync($.temporaryFolder);
}catch(e){}
var cleanIdentifier = function(identifier){
return identifier.replace(/^0-9A-Za-z_-/img, '');
}
var getChunkFilename = function(chunkNumber, identifier){
// Clean up the identifier
identifier = cleanIdentifier(identifier);
// What would the file name be?
return path.join($.temporaryFolder, './resumable-'+identifier+'.'+chunkNumber);
}
var validateRequest = function(chunkNumber, chunkSize, totalSize, identifier, filename, fileSize){
// Clean up the identifier
identifier = cleanIdentifier(identifier);
// Check if the request is sane
if (chunkNumber==0 || chunkSize==0 || totalSize==0 || identifier.length==0 || filename.length==0) {
return 'non_resumable_request';
}
var numberOfChunks = Math.max(Math.floor(totalSize/(chunkSize*1.0)), 1);
if (chunkNumber>numberOfChunks) {
return 'invalid_resumable_request1';
}
// Is the file too big?
if($.maxFileSize && totalSize>$.maxFileSize) {
return 'invalid_resumable_request2';
}
if(typeof(fileSize)!='undefined') {
if(chunkNumber<numberOfChunks && fileSize!=chunkSize) {
// The chunk in the POST request isn't the correct size
return 'invalid_resumable_request3';
}
if(numberOfChunks>1 && chunkNumber==numberOfChunks && fileSize!=((totalSize%chunkSize)+chunkSize)) {
// The chunks in the POST is the last one, and the fil is not the correct size
return 'invalid_resumable_request4';
}
if(numberOfChunks==1 && fileSize!=totalSize) {
// The file is only a single chunk, and the data size does not fit
return 'invalid_resumable_request5';
}
}
return 'valid';
}
//'found', filename, original_filename, identifier
//'not_found', null, null, null
$.get = function(req, callback){
var chunkNumber = req.param('resumableChunkNumber', 0);
var chunkSize = req.param('resumableChunkSize', 0);
var totalSize = req.param('resumableTotalSize', 0);
var identifier = req.param('resumableIdentifier', "");
var filename = req.param('resumableFilename', "");
if(validateRequest(chunkNumber, chunkSize, totalSize, identifier, filename)=='valid') {
var chunkFilename = getChunkFilename(chunkNumber, identifier);
fs.exists(chunkFilename, function(exists){
if(exists){
callback('found', chunkFilename, filename, identifier);
} else {
callback('not_found', null, null, null);
}
});
} else {
callback('not_found', null, null, null);
}
}
//'partly_done', filename, original_filename, identifier
//'done', filename, original_filename, identifier
//'invalid_resumable_request', null, null, null
//'non_resumable_request', null, null, null
$.post = function(req, callback){
var fields = req.body;
var files = req.files;
var chunkNumber = fields['resumableChunkNumber'];
var chunkSize = fields['resumableChunkSize'];
var totalSize = fields['resumableTotalSize'];
var identifier = cleanIdentifier(fields['resumableIdentifier']);
var filename = fields['resumableFilename'];
var original_filename = fields['resumableIdentifier'];
if(!files[$.fileParameterName] || !files[$.fileParameterName].size) {
callback('invalid_resumable_request', null, null, null);
return;
}
var validation = validateRequest(chunkNumber, chunkSize, totalSize, identifier, files[$.fileParameterName].size);
if(validation=='valid') {
var chunkFilename = getChunkFilename(chunkNumber, identifier);
// Save the chunk (TODO: OVERWRITE)
fs.rename(files[$.fileParameterName].path, chunkFilename, function(){
// Do we have all the chunks?
var currentTestChunk = 1;
var numberOfChunks = Math.max(Math.floor(totalSize/(chunkSize*1.0)), 1);
var testChunkExists = function(){
fs.exists(getChunkFilename(currentTestChunk, identifier), function(exists){
if(exists){
currentTestChunk++;
if(currentTestChunk>numberOfChunks) {
callback('done', filename, original_filename, identifier);
} else {
// Recursion
testChunkExists();
}
} else {
callback('partly_done', filename, original_filename, identifier);
}
});
}
testChunkExists();
});
} else {
callback(validation, filename, original_filename, identifier);
}
}
// Pipe chunks directly in to an existsing WritableStream
// r.write(identifier, response);
// r.write(identifier, response, {end:false});
//
// var stream = fs.createWriteStream(filename);
// r.write(identifier, stream);
// stream.on('data', function(data){...});
// stream.on('end', function(){...});
$.write = function(identifier, writableStream, options) {
options = options || {};
options.end = (typeof options['end'] == 'undefined' ? true : options['end']);
// Iterate over each chunk
var pipeChunk = function(number) {
var chunkFilename = getChunkFilename(number, identifier);
fs.exists(chunkFilename, function(exists) {
if (exists) {
// If the chunk with the current number exists,
// then create a ReadStream from the file
// and pipe it to the specified writableStream.
var sourceStream = fs.createReadStream(chunkFilename);
sourceStream.pipe(writableStream, {
end: false
});
sourceStream.on('end', function() {
// When the chunk is fully streamed,
// jump to the next one
pipeChunk(number + 1);
});
} else {
// When all the chunks have been piped, end the stream
if (options.end) writableStream.end();
if (options.onDone) options.onDone();
}
});
}
pipeChunk(1);
}
$.clean = function(identifier, options) {
options = options || {};
// Iterate over each chunk
var pipeChunkRm = function(number) {
var chunkFilename = getChunkFilename(number, identifier);
//console.log('removing pipeChunkRm ', number, 'chunkFilename', chunkFilename);
fs.exists(chunkFilename, function(exists) {
if (exists) {
console.log('exist removing ', chunkFilename);
fs.unlink(chunkFilename, function(err) {
if (err && options.onError) options.onError(err);
});
pipeChunkRm(number + 1);
} else {
if (options.onDone) options.onDone();
}
});
}
pipeChunkRm(1);
}
return $;
}
/*
* MIT Licensed
* http://www.23developer.com/opensource
* http://github.com/23/resumable.js
* Steffen Tiedemann Christensen, [email protected]
*/
// Wrap the library in an IIFE so internals stay private (the public export
// presumably happens at the bottom of the library, outside this excerpt).
(function(){
"use strict";
// Constructor: `new Resumable(opts)` — also callable without `new`.
var Resumable = function(opts){
if ( !(this instanceof Resumable) ) {
return new Resumable(opts);
}
this.version = 1.0;
// SUPPORTED BY BROWSER?
// Check if these features are supported by the browser:
// - File object type
// - Blob object type
// - FileList object type
// - slicing files
this.support = (
(typeof(File)!=='undefined')
&&
(typeof(Blob)!=='undefined')
&&
(typeof(FileList)!=='undefined')
&&
(!!Blob.prototype.webkitSlice||!!Blob.prototype.mozSlice||!!Blob.prototype.slice||false)
);
// Unsupported browsers get `false` back instead of an instance.
if(!this.support) return(false);
// PROPERTIES
var $ = this;
// Every file added to this instance (ResumableFile objects).
$.files = [];
// Built-in option defaults; user-supplied `opts` take precedence via $.getOpt.
$.defaults = {
  chunkSize: 1 * 1024 * 1024,
  forceChunkSize: false,
  simultaneousUploads: 3,
  fileParameterName: 'file',
  throttleProgressCallbacks: 0.5,
  query: {},
  headers: {},
  preprocess: null,
  method: 'multipart',
  uploadMethod: 'POST',
  testMethod: 'GET',
  prioritizeFirstAndLastChunk: false,
  target: '/',
  parameterNamespace: '',
  testChunks: true,
  generateUniqueIdentifier: null,
  getTarget: null,
  maxChunkRetries: undefined,
  chunkRetryInterval: undefined,
  permanentErrors: [400, 404, 415, 500, 501],
  maxFiles: undefined,
  withCredentials: false,
  xhrTimeout: 0,
  maxFilesErrorCallback: function (files, errorCount) {
    var maxFiles = $.getOpt('maxFiles');
    alert('Please upload no more than ' + maxFiles + ' file' + (maxFiles === 1 ? '' : 's') + ' at a time.');
  },
  minFileSize: 1,
  minFileSizeErrorCallback: function (file, errorCount) {
    // Fixed operator precedence: without the parentheses, `a || b + '...'`
    // showed only the bare fileName whenever file.fileName was set.
    alert((file.fileName || file.name) + ' is too small, please upload files larger than ' + $h.formatSize($.getOpt('minFileSize')) + '.');
  },
  maxFileSize: undefined,
  maxFileSizeErrorCallback: function (file, errorCount) {
    alert((file.fileName || file.name) + ' is too large, please upload files less than ' + $h.formatSize($.getOpt('maxFileSize')) + '.');
  },
  fileType: [],
  fileTypeErrorCallback: function (file, errorCount) {
    alert((file.fileName || file.name) + ' has type not allowed, please upload files of type ' + $.getOpt('fileType') + '.');
  }
};
// User options as passed to the constructor.
$.opts = opts || {};
// Resolve option `o` (or an array of option names) by walking up the
// chunk -> file -> resumable chain, finally falling back to $.defaults.
$.getOpt = function (o) {
  var $opt = this;
  if (o instanceof Array) {
    // Batch lookup: return a map of option name -> resolved value.
    var resolved = {};
    $h.each(o, function (name) {
      resolved[name] = $opt.getOpt(name);
    });
    return resolved;
  }
  // Single option: check each level's own opts, then climb one level up.
  if ($opt instanceof ResumableChunk) {
    if (typeof $opt.opts[o] !== 'undefined') return $opt.opts[o];
    $opt = $opt.fileObj;
  }
  if ($opt instanceof ResumableFile) {
    if (typeof $opt.opts[o] !== 'undefined') return $opt.opts[o];
    $opt = $opt.resumableObj;
  }
  if ($opt instanceof Resumable) {
    return (typeof $opt.opts[o] !== 'undefined') ? $opt.opts[o] : $opt.defaults[o];
  }
};
// EVENTS
// $.events is a flat [name, handler, name, handler, ...] list.
// catchAll(event, ...) receives every event; fileError/fileProgress are
// re-fired as the generic error/progress events.
// fileSuccess(file), fileProgress(file), fileAdded(file, event), fileRetry(file), fileError(file, message),
// complete(), progress(), error(message, file), pause()
$.events = [];
$.on = function (event, callback) {
  $.events.push(event.toLowerCase(), callback);
};
$.fire = function () {
  // `arguments` is array-like (not a real Array in old Firefox), so copy it.
  var args = [];
  for (var i = 0; i < arguments.length; i++) args.push(arguments[i]);
  // Find event listeners, and support the pseudo-event `catchAll`.
  var event = args[0].toLowerCase();
  // Fixed off-by-one: the original looped with i <= length, reading one
  // (name, handler) pair past the end of the list.
  for (var i = 0; i < $.events.length; i += 2) {
    if ($.events[i] == event) $.events[i + 1].apply($, args.slice(1));
    if ($.events[i] == 'catchall') $.events[i + 1].apply(null, args);
  }
  if (event == 'fileerror') $.fire('error', args[2], args[1]);
  if (event == 'fileprogress') $.fire('progress');
};
// INTERNAL HELPER METHODS (handy, but ultimately not part of uploading)
var $h = {
// Stop a DOM event from propagating or triggering default behaviour.
stopEvent: function(e){
e.stopPropagation();
e.preventDefault();
},
// Iterate arrays/FileLists as callback(value), or plain objects as
// callback(key, value); returning false from the callback stops the loop.
each: function(o,callback){
if(typeof(o.length)!=='undefined') {
for (var i=0; i<o.length; i++) {
// Array or FileList
if(callback(o[i])===false) return;
}
} else {
for (i in o) {
// Object (reuses the var-hoisted `i` declared in the branch above)
if(callback(i,o[i])===false) return;
}
}
},
// Identifier = "<size>-<sanitized relative path>", unless the user supplied a
// generateUniqueIdentifier option (whose return value is used directly).
generateUniqueIdentifier:function(file){
var custom = $.getOpt('generateUniqueIdentifier');
if(typeof custom === 'function') {
return custom(file);
}
var relativePath = file.webkitRelativePath||file.fileName||file.name; // Some confusion in different versions of Firefox
var size = file.size;
return(size + '-' + relativePath.replace(/[^0-9a-zA-Z_-]/img, ''));
},
// Loose-equality membership test built on $h.each's early-exit contract.
contains:function(array,test) {
var result = false;
$h.each(array, function(value) {
if (value == test) {
result = true;
return false;
}
return true;
});
return result;
},
// Human-readable size: bytes, KB (0 decimals), MB/GB (1 decimal).
formatSize:function(size){
if(size<1024) {
return size + ' bytes';
} else if(size<1024*1024) {
return (size/1024.0).toFixed(0) + ' KB';
} else if(size<1024*1024*1024) {
return (size/1024.0/1024.0).toFixed(1) + ' MB';
} else {
return (size/1024.0/1024.0/1024.0).toFixed(1) + ' GB';
}
},
// Build the request URL: the `target` option (string or function) plus the
// given query parameters, joined with ? or & as appropriate.
getTarget:function(params){
var target = $.getOpt('target');
if(typeof target === 'function') {
return target(params);
}
if(target.indexOf('?') < 0) {
target += '?';
} else {
target += '&';
}
return target + params.join('&');
}
};
// Drag-and-drop handler: prefer DataTransferItem entries (better folder
// support), fall back to plain File objects.
var onDrop = function (dropEvent) {
  $h.stopEvent(dropEvent);
  var dataTransfer = dropEvent.dataTransfer;
  if (dataTransfer && dataTransfer.items) {
    // Handle dropped things as items when we can (nicer folder handling).
    loadFiles(dataTransfer.items, dropEvent);
  } else if (dataTransfer && dataTransfer.files) {
    // Otherwise handle them as plain files.
    loadFiles(dataTransfer.files, dropEvent);
  }
};
// Used to suppress the browser's default drag-over behaviour.
var preventDefault = function (e) {
  e.preventDefault();
};
// INTERNAL METHODS (both handy and responsible for the heavy load)
/**
* @summary This function loops over the files passed in from a drag and drop operation and gets them ready for appendFilesFromFileList
* It attempts to use FileSystem API calls to extract files and subfolders if the dropped items include folders
* That capability is only currently available in Chrome, but if it isn't available it will just pass the items along to
* appendFilesFromFileList (via enqueueFileAddition to help with asynchronous processing.)
* @param files {Array} - the File or Entry objects to be processed depending on your browser support
* @param event {Object} - the drop event object
* @param [queue] {Object} - an object to keep track of our progress processing the dropped items
* @param [path] {String} - the relative path from the originally selected folder to the current files if extracting files from subfolders
*/
var loadFiles = function (files, event, queue, path){
//initialize the queue object if it doesn't exist
if (!queue) {
queue = {
total: 0,
files: [],
event: event
};
}
//update the total number of things we plan to process
updateQueueTotal(files.length, queue);
//loop over all the passed in objects checking if they are files or folders
for (var i = 0; i < files.length; i++) {
var file = files[i];
var entry, reader;
if (file.isFile || file.isDirectory) {
//this is an object we can handle below with no extra work needed up front
entry = file;
}
else if (file.getAsEntry) {
//get the file as an entry object if we can using the proposed HTML5 api (unlikely to get implemented by anyone)
entry = file.getAsEntry();
}
else if (file.webkitGetAsEntry) {
//get the file as an entry object if we can using the Chrome specific webkit implementation
entry = file.webkitGetAsEntry();
}
else if (typeof file.getAsFile === 'function') {
//if this is still a DataTransferItem object, get it as a file object
enqueueFileAddition(file.getAsFile(), queue, path);
//we just added this file object to the queue so we can go to the next object in the loop and skip the processing below
continue;
}
else if (File && file instanceof File) {
//this is already a file object so just queue it up and move on
enqueueFileAddition(file, queue, path);
//we just added this file object to the queue so we can go to the next object in the loop and skip the processing below
continue;
}
else {
//we can't do anything with this object, decrement the expected total and skip the processing below
updateQueueTotal(-1, queue);
continue;
}
if (!entry) {
//there isn't anything we can do with this so decrement the total expected
updateQueueTotal(-1, queue);
}
else if (entry.isFile) {
//this is handling to read an entry object representing a file, parsing the file object is asynchronous which is why we need the queue
//currently entry objects will only exist in this flow for Chrome
entry.file(function(file) {
enqueueFileAddition(file, queue, path);
}, function(err) {
console.warn(err);
});
}
else if (entry.isDirectory) {
//this is handling to read an entry object representing a folder, parsing the directory object is asynchronous which is why we need the queue
//currently entry objects will only exist in this flow for Chrome
//NOTE(review): entry/reader/newEntries are var-scoped to loadFiles and shared
//by the async readEntries callbacks of every loop iteration — dropping several
//directories at once may mix them up; confirm before relying on multi-folder drops.
reader = entry.createReader();
var newEntries = [];
//wrap the callback in another function so we can store the path in a closure
var readDir = function(path){
reader.readEntries(
//success callback: read entries out of the directory
function(entries){
if (entries.length>0){
//add these results to the array of all the new stuff
for (var i=0; i<entries.length; i++) { newEntries.push(entries[i]); }
//call this function again as all the results may not have been sent yet
readDir(entry.fullPath);
}
else {
//we have now gotten all the results in newEntries so let's process them recursively
loadFiles(newEntries, event, queue, path);
//this was a directory rather than a file so decrement the expected file count
updateQueueTotal(-1, queue);
}
},
//error callback, most often hit if there is a directory with nothing inside it
function(err) {
//this was a directory rather than a file so decrement the expected file count
updateQueueTotal(-1, queue);
console.warn(err);
}
);
};
readDir(entry.fullPath);
}
}
};
/**
 * @summary Adjust the total number of files we are expecting from drop processing.
 * If the (possibly negative) adjustment brings the processed count up to the
 * expected total, flush the queue to appendFilesFromFileList.
 * @param addition {Number} - the number of additional files we expect to process (may be negative)
 * @param queue {Object} - an object to keep track of our progress processing the dropped items
 */
var updateQueueTotal = function (addition, queue) {
  queue.total += addition;
  // Everything we expect has shown up — hand the batch over.
  if (queue.files.length === queue.total) {
    appendFilesFromFileList(queue.files, queue.event);
  }
};
/**
 * @summary Record one processed file; flush the queue when the expected total
 * is reached.
 * @param file {Object} - File object to be passed along to appendFilesFromFileList eventually
 * @param queue {Object} - an object to keep track of our progress processing the dropped items
 * @param [path] {String} - the file's relative path from the originally dropped folder if we are parsing folder content (Chrome only for now)
 */
var enqueueFileAddition = function (file, queue, path) {
  // Remember the folder-relative path when the file came out of a directory.
  if (path) {
    file.relativePath = path + '/' + file.name;
  }
  queue.files.push(file);
  // Everything we expect has shown up — hand the batch over.
  if (queue.files.length === queue.total) {
    appendFilesFromFileList(queue.files, queue.event);
  }
};
// Validate dropped/selected files against the maxFiles/minFileSize/maxFileSize/
// fileType options, wrap the survivors in ResumableFile objects, and fire
// 'fileAdded' per file plus a single 'filesAdded' for the batch.
var appendFilesFromFileList = function(fileList, event){
// check for uploading too many files
var errorCount = 0;
var o = $.getOpt(['maxFiles', 'minFileSize', 'maxFileSize', 'maxFilesErrorCallback', 'minFileSizeErrorCallback', 'maxFileSizeErrorCallback', 'fileType', 'fileTypeErrorCallback']);
if (typeof(o.maxFiles)!=='undefined' && o.maxFiles<(fileList.length+$.files.length)) {
// if single-file upload, file is already added, and trying to add 1 new file, simply replace the already-added file
if (o.maxFiles===1 && $.files.length===1 && fileList.length===1) {
$.removeFile($.files[0]);
} else {
o.maxFilesErrorCallback(fileList, errorCount++);
return false;
}
}
var files = [];
$h.each(fileList, function(file){
var fileName = file.name;
if(o.fileType.length > 0){
var fileTypeFound = false;
for(var index in o.fileType){
var extension = '.' + o.fileType[index];
// accept the file if its name ends with one of the allowed extensions
if(fileName.indexOf(extension, fileName.length - extension.length) !== -1){
fileTypeFound = true;
break;
}
}
if (!fileTypeFound) {
o.fileTypeErrorCallback(file, errorCount++);
return false;
}
}
if (typeof(o.minFileSize)!=='undefined' && file.size<o.minFileSize) {
o.minFileSizeErrorCallback(file, errorCount++);
return false;
}
if (typeof(o.maxFileSize)!=='undefined' && file.size>o.maxFileSize) {
o.maxFileSizeErrorCallback(file, errorCount++);
return false;
}
// Register the file under its unique identifier (skipping duplicates) and
// fire 'fileAdded' on the next tick.
function addFile(uniqueIdentifier){
if (!$.getFromUniqueIdentifier(uniqueIdentifier)) {(function(){
file.uniqueIdentifier = uniqueIdentifier;
var f = new ResumableFile($, file, uniqueIdentifier);
$.files.push(f);
files.push(f);
f.container = (typeof event != 'undefined' ? event.srcElement : null);
window.setTimeout(function(){
$.fire('fileAdded', f, event)
},0);
})()};
}
// directories have size == 0
// generateUniqueIdentifier may return a plain value or a promise-like object
// (with done/fail methods), e.g. from a custom async identifier generator.
var uniqueIdentifier = $h.generateUniqueIdentifier(file)
if(uniqueIdentifier && typeof uniqueIdentifier.done === 'function' && typeof uniqueIdentifier.fail === 'function'){
uniqueIdentifier
.done(function(uniqueIdentifier){
addFile(uniqueIdentifier);
})
.fail(function(){
addFile();
});
}else{
addFile(uniqueIdentifier);
}
});
// 'filesAdded' fires on the next tick with every file accepted in this batch.
window.setTimeout(function(){
$.fire('filesAdded', files)
},0);
};
// INTERNAL OBJECT TYPES
// Represents one file being uploaded: owns its list of ResumableChunk objects
// and exposes per-file control (abort/cancel/retry/pause) plus progress.
function ResumableFile(resumableObj, file, uniqueIdentifier){
var $ = this;
$.opts = {};
$.getOpt = resumableObj.getOpt;
$._prevProgress = 0;
$.resumableObj = resumableObj;
$.file = file;
$.fileName = file.fileName||file.name; // Some confusion in different versions of Firefox
$.size = file.size;
$.relativePath = file.webkitRelativePath || file.relativePath || $.fileName;
$.uniqueIdentifier = uniqueIdentifier;
$._pause = false;
$.container = '';
// NOTE(review): _error starts true whenever an identifier was supplied —
// presumably relying on $.bootstrap() (called at the bottom) to reset it to
// false; confirm the intent.
var _error = uniqueIdentifier !== undefined;
// Callback when something happens within the chunk
var chunkEvent = function(event, message){
// event can be 'progress', 'success', 'error' or 'retry'
switch(event){
case 'progress':
$.resumableObj.fire('fileProgress', $);
break;
case 'error':
// One failed chunk fails the whole file: stop everything, drop the chunks.
$.abort();
_error = true;
$.chunks = [];
$.resumableObj.fire('fileError', $, message);
break;
case 'success':
if(_error) return;
$.resumableObj.fire('fileProgress', $); // it's at least progress
if($.isComplete()) {
$.resumableObj.fire('fileSuccess', $, message);
}
break;
case 'retry':
$.resumableObj.fire('fileRetry', $);
break;
}
};
// Main code to set up a file object with chunks,
// packaged to be able to handle retries if needed.
$.chunks = [];
// Abort every chunk currently uploading (the chunk list stays intact).
$.abort = function(){
// Stop current uploads
var abortCount = 0;
$h.each($.chunks, function(c){
if(c.status()=='uploading') {
c.abort();
abortCount++;
}
});
if(abortCount>0) $.resumableObj.fire('fileProgress', $);
};
// Abort all chunks and remove this file from the Resumable instance.
$.cancel = function(){
// Reset this file to be void
var _chunks = $.chunks;
$.chunks = [];
// Stop current uploads
$h.each(_chunks, function(c){
if(c.status()=='uploading') {
c.abort();
$.resumableObj.uploadNextChunk();
}
});
$.resumableObj.removeFile($);
$.resumableObj.fire('fileProgress', $);
};
// Re-chunk the file and restart the upload once chunking completes.
$.retry = function(){
$.bootstrap();
var firedRetry = false;
$.resumableObj.on('chunkingComplete', function(){
if(!firedRetry) $.resumableObj.upload();
firedRetry = true;
});
};
// (Re)build the chunk list from the file; each chunk is created on its own
// tick so 'chunkingProgress' events can interleave with other work.
$.bootstrap = function(){
$.abort();
_error = false;
// Rebuild stack of chunks from file
$.chunks = [];
$._prevProgress = 0;
var round = $.getOpt('forceChunkSize') ? Math.ceil : Math.floor;
var maxOffset = Math.max(round($.file.size/$.getOpt('chunkSize')),1);
for (var offset=0; offset<maxOffset; offset++) {(function(offset){
window.setTimeout(function(){
$.chunks.push(new ResumableChunk($.resumableObj, $, offset, chunkEvent));
$.resumableObj.fire('chunkingProgress',$,offset/maxOffset);
},0);
})(offset)}
window.setTimeout(function(){
$.resumableObj.fire('chunkingComplete',$);
},0);
};
// Overall progress in [0, 1]; an errored file counts as 1 and reported
// progress never regresses.
$.progress = function(){
if(_error) return(1);
// Sum up progress across everything
var ret = 0;
var error = false;
$h.each($.chunks, function(c){
if(c.status()=='error') error = true;
ret += c.progress(true); // get chunk progress relative to entire file
});
ret = (error ? 1 : (ret>0.99999 ? 1 : ret));
ret = Math.max($._prevProgress, ret); // We don't want to lose percentages when an upload is paused
$._prevProgress = ret;
return(ret);
};
// True while any chunk reports status 'uploading'.
$.isUploading = function(){
var uploading = false;
$h.each($.chunks, function(chunk){
if(chunk.status()=='uploading') {
uploading = true;
return(false);
}
});
return(uploading);
};
// True once no chunk is pending, uploading, or mid-preprocess.
$.isComplete = function(){
var outstanding = false;
$h.each($.chunks, function(chunk){
var status = chunk.status();
if(status=='pending' || status=='uploading' || chunk.preprocessState === 1) {
outstanding = true;
return(false);
}
});
return(!outstanding);
};
// Toggle (no argument) or explicitly set the paused flag.
$.pause = function(pause){
if(typeof(pause)==='undefined'){
$._pause = ($._pause ? false : true);
}else{
$._pause = pause;
}
};
$.isPaused = function() {
return $._pause;
};
// Bootstrap and return
$.resumableObj.fire('chunkingStart', $);
$.bootstrap();
return(this);
}
// Represents one byte range [startByte, endByte) of a file plus the XHR
// machinery to test for and upload it.
function ResumableChunk(resumableObj, fileObj, offset, callback){
var $ = this;
$.opts = {};
$.getOpt = resumableObj.getOpt;
$.resumableObj = resumableObj;
$.fileObj = fileObj;
$.fileObjSize = fileObj.size;
$.fileObjType = fileObj.file.type;
$.offset = offset;
// callback(event, message) — the chunkEvent handler from ResumableFile
// ('progress', 'success', 'error' or 'retry').
$.callback = callback;
$.lastProgressCallback = (new Date);
$.tested = false;
$.retries = 0;
$.pendingRetry = false;
$.preprocessState = 0; // 0 = unprocessed, 1 = processing, 2 = finished
// Computed properties
var chunkSize = $.getOpt('chunkSize');
$.loaded = 0;
$.startByte = $.offset*chunkSize;
$.endByte = Math.min($.fileObjSize, ($.offset+1)*chunkSize);
if ($.fileObjSize-$.endByte < chunkSize && !$.getOpt('forceChunkSize')) {
// The last chunk will be bigger than the chunk size, but less than 2*chunkSize
$.endByte = $.fileObjSize;
}
$.xhr = null;
// test() makes a GET request without any data to see if the chunk has already been uploaded in a previous session
$.test = function(){
// Set up request and listen for event
$.xhr = new XMLHttpRequest();
var testHandler = function(e){
$.tested = true;
var status = $.status();
if(status=='success') {
// Chunk is already on the server: report success and move on.
$.callback(status, $.message());
$.resumableObj.uploadNextChunk();
} else {
// Missing (or errored): fall through to the actual upload.
$.send();
}
};
$.xhr.addEventListener('load', testHandler, false);
$.xhr.addEventListener('error', testHandler, false);
$.xhr.addEventListener('timeout', testHandler, false);
// Add data from the query options
var params = [];
var parameterNamespace = $.getOpt('parameterNamespace');
var customQuery = $.getOpt('query');
if(typeof customQuery == 'function') customQuery = customQuery($.fileObj, $);
$h.each(customQuery, function(k,v){
params.push([encodeURIComponent(parameterNamespace+k), encodeURIComponent(v)].join('='));
});
// Add extra data to identify chunk
params.push([parameterNamespace+'resumableChunkNumber', encodeURIComponent($.offset+1)].join('='));
params.push([parameterNamespace+'resumableChunkSize', encodeURIComponent($.getOpt('chunkSize'))].join('='));
params.push([parameterNamespace+'resumableCurrentChunkSize', encodeURIComponent($.endByte - $.startByte)].join('='));
params.push([parameterNamespace+'resumableTotalSize', encodeURIComponent($.fileObjSize)].join('='));
params.push([parameterNamespace+'resumableType', encodeURIComponent($.fileObjType)].join('='));
params.push([parameterNamespace+'resumableIdentifier', encodeURIComponent($.fileObj.uniqueIdentifier)].join('='));
params.push([parameterNamespace+'resumableFilename', encodeURIComponent($.fileObj.fileName)].join('='));
params.push([parameterNamespace+'resumableRelativePath', encodeURIComponent($.fileObj.relativePath)].join('='));
params.push([parameterNamespace+'resumableTotalChunks', encodeURIComponent($.fileObj.chunks.length)].join('='));
// Append the relevant chunk and send it
$.xhr.open($.getOpt('testMethod'), $h.getTarget(params));
$.xhr.timeout = $.getOpt('xhrTimeout');
$.xhr.withCredentials = $.getOpt('withCredentials');
// Add data from header options
var customHeaders = $.getOpt('headers');
if(typeof customHeaders === 'function') {
customHeaders = customHeaders($.fileObj, $);
}
$h.each(customHeaders, function(k,v) {
$.xhr.setRequestHeader(k, v);
});
$.xhr.send(null);
};
// Called by a user-supplied `preprocess` hook when it finishes; resumes the upload.
$.preprocessFinished = function(){
$.preprocessState = 2;
$.send();
};
// send() uploads the actual data in a POST call
$.send = function(){
// Run the optional preprocess hook exactly once before uploading; the hook
// calls $.preprocessFinished() to resume.
var preprocess = $.getOpt('preprocess');
if(typeof preprocess === 'function') {
switch($.preprocessState) {
case 0: $.preprocessState = 1; preprocess($); return;
case 1: return;
case 2: break;
}
}
// With testChunks enabled, probe the server first (test() calls send() again
// when the chunk is missing).
if($.getOpt('testChunks') && !$.tested) {
$.test();
return;
}
// Set up request and listen for event
$.xhr = new XMLHttpRequest();
// Progress
$.xhr.upload.addEventListener('progress', function(e){
// Throttle 'progress' callbacks to one per throttleProgressCallbacks seconds.
if( (new Date) - $.lastProgressCallback > $.getOpt('throttleProgressCallbacks') * 1000 ) {
$.callback('progress');
$.lastProgressCallback = (new Date);
}
$.loaded=e.loaded||0;
}, false);
$.loaded = 0;
$.pendingRetry = false;
$.callback('progress');
// Done (either done, failed or retry)
var doneHandler = function(e){
var status = $.status();
if(status=='success'||status=='error') {
$.callback(status, $.message());
$.resumableObj.uploadNextChunk();
} else {
// Transient failure: retry, optionally after chunkRetryInterval ms.
$.callback('retry', $.message());
$.abort();
$.retries++;
var retryInterval = $.getOpt('chunkRetryInterval');
if(retryInterval !== undefined) {
$.pendingRetry = true;
setTimeout($.send, retryInterval);
} else {
$.send();
}
}
};
$.xhr.addEventListener('load', doneHandler, false);
$.xhr.addEventListener('error', doneHandler, false);
$.xhr.addEventListener('timeout', doneHandler, false);
// Set up the basic query data from Resumable
var query = {
resumableChunkNumber: $.offset+1,
resumableChunkSize: $.getOpt('chunkSize'),
resumableCurrentChunkSize: $.endByte - $.startByte,
resumableTotalSize: $.fileObjSize,
resumableType: $.fileObjType,
resumableIdentifier: $.fileObj.uniqueIdentifier,
resumableFilename: $.fileObj.fileName,
resumableRelativePath: $.fileObj.relativePath,
resumableTotalChunks: $.fileObj.chunks.length
};
// Mix in custom data
var customQuery = $.getOpt('query');
if(typeof customQuery == 'function') customQuery = customQuery($.fileObj, $);
$h.each(customQuery, function(k,v){
query[k] = v;
});
// Slice this chunk's byte range out of the file (vendor-prefixed where needed).
var func = ($.fileObj.file.slice ? 'slice' : ($.fileObj.file.mozSlice ? 'mozSlice' : ($.fileObj.file.webkitSlice ? 'webkitSlice' : 'slice'))),
bytes = $.fileObj.file[func]($.startByte,$.endByte),
data = null,
target = $.getOpt('target');
var parameterNamespace = $.getOpt('parameterNamespace');
if ($.getOpt('method') === 'octet') {
// 'octet' method: raw bytes in the body, all metadata in the query string.
// Add data from the query options
data = bytes;
var params = [];
$h.each(query, function(k,v){
params.push([encodeURIComponent(parameterNamespace+k), encodeURIComponent(v)].join('='));
});
target = $h.getTarget(params);
} else {
// Default 'multipart' method: metadata and bytes in a FormData body.
// Add data from the query options
data = new FormData();
$h.each(query, function(k,v){
data.append(parameterNamespace+k,v);
});
data.append(parameterNamespace+$.getOpt('fileParameterName'), bytes);
}
var method = $.getOpt('uploadMethod');
$.xhr.open(method, target);
if ($.getOpt('method') === 'octet') {
$.xhr.setRequestHeader('Content-Type', 'binary/octet-stream');
}
$.xhr.timeout = $.getOpt('xhrTimeout');
$.xhr.withCredentials = $.getOpt('withCredentials');
// Add data from header options
var customHeaders = $.getOpt('headers');
if(typeof customHeaders === 'function') {
customHeaders = customHeaders($.fileObj, $);
}
$h.each(customHeaders, function(k,v) {
$.xhr.setRequestHeader(k, v);
});
$.xhr.send(data);
};
// Cancel any in-flight request for this chunk and clear the XHR reference
// so status() reports the chunk as 'pending' again.
$.abort = function(){
var activeRequest = $.xhr;
if(activeRequest) activeRequest.abort();
$.xhr = null;
};
// Report this chunk's lifecycle state: 'pending', 'uploading', 'success'
// or 'error'. Guard clauses replace the original nested if/else.
$.status = function(){
if($.pendingRetry) {
// A scheduled retry counts as actively uploading — the next attempt
// just hasn't fired yet.
return 'uploading';
}
if(!$.xhr) {
// Nothing has been sent for this chunk.
return 'pending';
}
if($.xhr.readyState < 4) {
// OPENED / HEADERS_RECEIVED / LOADING — the transfer is in progress.
return 'uploading';
}
var code = $.xhr.status;
if(code == 200 || code == 201) {
// HTTP 200 or 201 (created): upload accepted.
return 'success';
}
if($h.contains($.getOpt('permanentErrors'), code) || $.retries >= $.getOpt('maxChunkRetries')) {
// A configured permanent error (e.g. 415/500/501) or retry budget spent.
return 'error';
}
// Transient failure (e.g. 503 service unavailable): reset the request so
// the chunk goes back to the queue for another attempt.
$.abort();
return 'pending';
};
// Server response body from the most recent request; '' before any request.
$.message = function(){
var request = $.xhr;
return request ? request.responseText : '';
};
// Upload progress for this chunk in [0,1]. With relative truthy the value
// is scaled by the chunk's share of the whole file instead.
$.progress = function(relative){
var chunkBytes = $.endByte - $.startByte;
var factor = relative ? chunkBytes / $.fileObjSize : 1;
// While waiting on a retry timer no bytes are moving.
if($.pendingRetry) return 0;
var state = $.status();
if(state === 'success' || state === 'error') {
// Finished one way or the other — the whole chunk's share is accounted.
return 1 * factor;
}
if(state === 'pending') {
return 0 * factor;
}
// Uploading: fraction of this chunk's bytes confirmed so far.
return ($.loaded / chunkBytes) * factor;
};
return(this);
}
// QUEUE
// Pick the next pending chunk and start it. Returns true when a chunk was
// started, false when nothing was available. Note: $h.each stops iterating
// when its callback returns false — that is how the early exits below work.
$.uploadNextChunk = function(){
var found = false;
// In some cases (such as videos) it's really handy to upload the first
// and last chunk of a file quickly; this lets the server check the file's
// metadata and determine if there's even a point in continuing.
if ($.getOpt('prioritizeFirstAndLastChunk')) {
$h.each($.files, function(file){
// First chunk still pending and not in a preprocess hook? Send it.
if(file.chunks.length && file.chunks[0].status()=='pending' && file.chunks[0].preprocessState === 0) {
file.chunks[0].send();
found = true;
return(false);
}
// Otherwise try the last chunk (only if the file has more than one).
if(file.chunks.length>1 && file.chunks[file.chunks.length-1].status()=='pending' && file.chunks[file.chunks.length-1].preprocessState === 0) {
file.chunks[file.chunks.length-1].send();
found = true;
return(false);
}
});
if(found) return(true);
}
// Now, simply look for the next, best thing to upload:
// the first pending, non-preprocessing chunk of any non-paused file.
$h.each($.files, function(file){
if(file.isPaused()===false){
$h.each(file.chunks, function(chunk){
if(chunk.status()=='pending' && chunk.preprocessState === 0) {
chunk.send();
found = true;
return(false);
}
});
}
if(found) return(false);
});
if(found) return(true);
// There are no more outstanding chunks to upload; check if everything is done
var outstanding = false;
$h.each($.files, function(file){
if(!file.isComplete()) {
outstanding = true;
return(false);
}
});
if(!outstanding) {
// All chunks have been uploaded, complete
$.fire('complete');
}
return(false);
};
// PUBLIC METHODS FOR RESUMABLE.JS
// Wire up one or more DOM nodes as file pickers. A node that is already an
// <input type="file"> is used directly; any other node gets a hidden file
// input appended and clicks proxied to it. With isDirectory=true the input
// asks for folder selection (webkitdirectory — WebKit-based browsers only).
$.assignBrowse = function(domNodes, isDirectory){
// Accept a single node or any array-like collection of nodes.
if(typeof(domNodes.length)=='undefined') domNodes = [domNodes];
$h.each(domNodes, function(domNode) {
var input;
if(domNode.tagName==='INPUT' && domNode.type==='file'){
input = domNode;
} else {
// Build the hidden input and forward clicks on the host node to it.
input = document.createElement('input');
input.setAttribute('type', 'file');
input.style.display = 'none';
domNode.addEventListener('click', function(){
// Momentarily make the input renderable (but transparent) so that
// focus()/click() reliably open the dialog across browsers.
input.style.opacity = 0;
input.style.display='block';
input.focus();
input.click();
input.style.display='none';
}, false);
domNode.appendChild(input);
}
var maxFiles = $.getOpt('maxFiles');
// Allow multi-select unless the integrator capped uploads at exactly one
// file (loose != deliberately matches both 1 and "1").
if (typeof(maxFiles)==='undefined'||maxFiles!=1){
input.setAttribute('multiple', 'multiple');
} else {
input.removeAttribute('multiple');
}
if(isDirectory){
input.setAttribute('webkitdirectory', 'webkitdirectory');
} else {
input.removeAttribute('webkitdirectory');
}
// When new files are added, simply append them to the overall list
input.addEventListener('change', function(e){
appendFilesFromFileList(e.target.files,e);
// Clear the input so re-selecting the same file fires 'change' again.
e.target.value = '';
}, false);
});
};
// Turn the given DOM node(s) into drag-and-drop targets for uploads.
$.assignDrop = function(domNodes){
// Accept a single node or an array-like list of nodes.
if(typeof(domNodes.length)=='undefined') domNodes = [domNodes];
$h.each(domNodes, function(node) {
// dragover/dragenter must be cancelled for the drop event to fire.
node.addEventListener('dragover', preventDefault, false);
node.addEventListener('dragenter', preventDefault, false);
node.addEventListener('drop', onDrop, false);
});
};
// Undo assignDrop(): detach the drag-and-drop listeners from the node(s).
$.unAssignDrop = function(domNodes) {
if (typeof(domNodes.length) == 'undefined') domNodes = [domNodes];
$h.each(domNodes, function(node) {
node.removeEventListener('dragover', preventDefault);
node.removeEventListener('dragenter', preventDefault);
node.removeEventListener('drop', onDrop);
});
};
// True while any queued file reports an active chunk upload.
$.isUploading = function(){
var active = false;
$h.each($.files, function(file){
if (!file.isUploading()) return;
active = true;
return(false); // returning false stops $h.each early
});
return active;
};
// Start (or resume) uploading the queue. No-op while an upload is already
// in flight so calling this repeatedly can't over-subscribe the pipeline.
$.upload = function(){
if($.isUploading()) return;
$.fire('uploadStart');
// Prime the pipeline with the configured number of parallel chunk uploads.
var slots = $.getOpt('simultaneousUploads');
for (var i = 0; i < slots; i++) {
$.uploadNextChunk();
}
};
// Pause the upload: abort every file's in-flight chunks. Completed chunk
// state is kept, so a later upload() resumes where things left off.
$.pause = function(){
$h.each($.files, function(file){
file.abort();
});
$.fire('pause');
};
// Cancel every queued file. Iterates backwards because file.cancel()
// removes the file from $.files while we're walking it.
$.cancel = function(){
$.fire('beforeCancel');
var i = $.files.length;
while (i--) {
$.files[i].cancel();
}
$.fire('cancel');
};
// Overall upload progress in [0,1], weighting each file by its size.
$.progress = function(){
var doneBytes = 0;
var allBytes = 0;
$h.each($.files, function(file){
doneBytes += file.progress() * file.size;
allBytes += file.size;
});
// Guard the empty queue (avoid 0/0).
if (allBytes > 0) return doneBytes / allBytes;
return 0;
};
// Queue a single file programmatically; goes through the same validation
// path as browse/drop additions.
$.addFile = function(file, event){
var singleton = [file];
appendFilesFromFileList(singleton, event);
};
// Remove every occurrence of the given file object from the queue.
// Backwards iteration keeps indices valid across splice() calls.
$.removeFile = function(file){
var i = $.files.length;
while (i--) {
if ($.files[i] === file) $.files.splice(i, 1);
}
};
// Look up a queued file by its unique identifier; returns the file object,
// or false when no file matches.
$.getFromUniqueIdentifier = function(uniqueIdentifier){
var match = false;
$h.each($.files, function(file){
// Loose == retained from the original so number/string ids compare equal.
if (file.uniqueIdentifier == uniqueIdentifier) match = file;
});
return match;
};
// Total size in bytes of every file currently in the queue.
$.getSize = function(){
var total = 0;
$h.each($.files, function(file){
total += file.size;
});
return total;
};
// Public hook so integrators can forward drop events they capture themselves.
$.handleDropEvent = function (e) {
onDrop(e);
};
// Public hook for integrator-managed <input type="file"> 'change' events.
$.handleChangeEvent = function (e) {
appendFilesFromFileList(e.target.files, e);
// Clear the input so selecting the same file again re-fires 'change'.
e.target.value = '';
};
return(this);
};
// Node.js-style export for Node and Component
// Module registration: CommonJS takes precedence, then AMD, otherwise the
// constructor is attached to the browser's global window object.
if (typeof module != 'undefined') {
module.exports = Resumable;
} else if (typeof define === "function" && define.amd) {
// AMD/requirejs: Define the module
define(function(){
return Resumable;
});
} else {
// Browser: Expose to window
window.Resumable = Resumable;
}
})();
@blazehub
Copy link

It's not working out.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment