// Based on Glacier's example: http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/examples.html#Amazon_Glacier__Multi-part_Upload
var fs = require('fs');
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws-config.json');

var s3 = new AWS.S3();

// File
var fileName = '5.pdf';
var filePath = './' + fileName;
var fileKey = fileName;
var buffer = fs.readFileSync(filePath); // filePath already includes './'

// S3 Upload options
var bucket = 'loctest';

// Upload
var startTime = new Date();
var partNum = 0;
var partSize = 1024 * 1024 * 5; // Minimum 5MB per chunk (except the last part), see http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
var numPartsLeft = Math.ceil(buffer.length / partSize);
var maxUploadTries = 3;
var multiPartParams = {
    Bucket: bucket,
    Key: fileKey,
    ContentType: 'application/pdf'
};
var multipartMap = {
    Parts: []
};

function completeMultipartUpload(s3, doneParams) {
  s3.completeMultipartUpload(doneParams, function(err, data) {
    if (err) {
      console.log("An error occurred while completing the multipart upload");
      console.log(err);
    } else {
      var delta = (new Date() - startTime) / 1000;
      console.log('Completed upload in', delta, 'seconds');
      console.log('Final upload data:', data);
    }
  });
}

function uploadPart(s3, multipart, partParams, tryNum) {
  tryNum = tryNum || 1;
  s3.uploadPart(partParams, function(multiErr, mData) {
    if (multiErr) {
      console.log('multiErr, upload part error:', multiErr);
      if (tryNum < maxUploadTries) {
        console.log('Retrying upload of part: #', partParams.PartNumber);
        uploadPart(s3, multipart, partParams, tryNum + 1);
      } else {
        console.log('Failed uploading part: #', partParams.PartNumber);
      }
      return;
    }
    // Record the part's ETag; completeMultipartUpload needs every part's
    // ETag and PartNumber, in order.
    multipartMap.Parts[this.request.params.PartNumber - 1] = {
      ETag: mData.ETag,
      PartNumber: Number(this.request.params.PartNumber)
    };
    console.log("Completed part", this.request.params.PartNumber);
    console.log('mData', mData);
    if (--numPartsLeft > 0) return; // complete only when all parts are uploaded

    var doneParams = {
      Bucket: bucket,
      Key: fileKey,
      MultipartUpload: multipartMap,
      UploadId: multipart.UploadId
    };

    console.log("Completing upload...");
    completeMultipartUpload(s3, doneParams);
  });
}

// Multipart
console.log("Creating multipart upload for:", fileKey);
s3.createMultipartUpload(multiPartParams, function(mpErr, multipart) {
  if (mpErr) { console.log('Error!', mpErr); return; }
  console.log("Got upload ID", multipart.UploadId);

  // Grab each partSize chunk and upload it as a part
  for (var rangeStart = 0; rangeStart < buffer.length; rangeStart += partSize) {
    partNum++;
    var end = Math.min(rangeStart + partSize, buffer.length),
        partParams = {
          Body: buffer.slice(rangeStart, end),
          Bucket: bucket,
          Key: fileKey,
          PartNumber: String(partNum),
          UploadId: multipart.UploadId
        };

    // Send a single part
    console.log('Uploading part: #', partParams.PartNumber, ', Range start:', rangeStart);
    uploadPart(s3, multipart, partParams);
  }
});
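One gap worth flagging in the script above: when a part still fails after maxUploadTries, the script only logs the failure, so the multipart upload stays open on S3 and its already-uploaded parts keep accruing storage charges. A minimal cleanup sketch using the same client and variables (the helper name is my own):

function abortMultipartUpload(s3, multipart) {
  // Abort the open upload so S3 discards the parts uploaded so far.
  s3.abortMultipartUpload({
    Bucket: bucket,
    Key: fileKey,
    UploadId: multipart.UploadId
  }, function(err) {
    if (err) console.log('Failed to abort multipart upload:', err);
    else console.log('Aborted multipart upload', multipart.UploadId);
  });
}

Calling this from the retry-exhausted branch of uploadPart, instead of only logging, would prevent orphaned uploads.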
Upload with a progress bar; this is tested code.

S3.upload(params, options, function (err, data) {
  if (err) {
    return reject("error"); // assumes this callback runs inside a Promise executor
  }
  alert("Successfully Uploaded!");
}).on("httpUploadProgress", (progress) => {
  const uploaded = Math.round((progress.loaded * 100) / progress.total);
  this.setState({
    progress: uploaded,
    uploadText: "Uploading...",
    uploading: true,
  });
});
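For context: upload() here is the SDK's managed uploader, which handles the multipart splitting itself. A sketch of what params and options might look like (the bucket and key are placeholders):

const params = {
  Bucket: "my-bucket",        // placeholder
  Key: "big-file.bin",        // placeholder
  Body: file                  // File/Blob in the browser, Buffer or stream in Node
};
const options = {
  partSize: 5 * 1024 * 1024,  // 5 MB parts, the S3 minimum
  queueSize: 4                // upload up to 4 parts concurrently
};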
How do I call the above function for multiple large files?

Example: FileList = [file1, file2]

let promiseArray = [];
for (const file of FileList) { // for...of, since for...in would iterate the indices
  promiseArray.push(multiPart(file));
}
Promise.all(promiseArray)
  .then(result => {
    // success
  });

When I run multiPart on the files in parallel, it mixes up the data of file1 and file2, and one file's contents end up written for both. Any solution?
/**
 * Initiate a multipart upload and get an upload ID that must be included
 * in each upload-part request. Each part must be at least 5 MB in size,
 * except the last part.
 */
async multiPart(options) {
  const { data, bucket, key } = options;
  const multiPartParams = { Bucket: bucket, Key: key };
  const multipart = await this._client.createMultipartUpload(multiPartParams).promise();
  const multipartMap = { Parts: [] };
  let partNum = 0;
  for (const d of data) {
    partNum += 1;
    const partParams = {
      ...multiPartParams,
      Body: d,
      PartNumber: String(partNum),
      UploadId: multipart.UploadId
    };
    const result = await this._client.uploadPart(partParams).promise();
    multipartMap.Parts[partNum - 1] = {
      ETag: result.ETag,
      PartNumber: Number(partNum)
    };
  }
  const doneParams = {
    ...multiPartParams,
    MultipartUpload: multipartMap,
    UploadId: multipart.UploadId
  };
  const result = await this._client.completeMultipartUpload(doneParams).promise();
  return result;
}
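Note that, unlike the original gist, this keeps all per-upload state (partNum, multipartMap) local to the call, so it should be safe to run several uploads concurrently; the file-mismatch question above runs into the gist's module-level globals instead. A usage sketch (names are made up; data is an iterable of Buffers, each at least 5 MB except the last):

// One independent multiPart call per file; no shared state to clobber.
const uploads = files.map(f =>
  this.multiPart({ data: f.chunks, bucket: 'my-bucket', key: f.name })
);
const results = await Promise.all(uploads);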
In which npm package is this._client.createMultipartUpload available? Please reply.
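Judging from the first script in this thread, this._client is an AWS.S3 instance from the aws-sdk package (the v2 JavaScript SDK), which is where these methods live:

const AWS = require('aws-sdk'); // npm install aws-sdk
const client = new AWS.S3();    // exposes createMultipartUpload, uploadPart, completeMultipartUpload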
Does this code work?
Just one question: does this occupy the Node.js server's disk space during the upload process?
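As far as I can tell from the script, no: it reads the whole file into memory with fs.readFileSync, so it costs RAM rather than disk. If memory is the concern, each part's Body could be a bounded read stream instead; a sketch, untested:

const fs = require('fs');

// Build a stream covering just one part's byte range, so the whole file
// never has to sit in memory at once.
function partBody(filePath, rangeStart, partSize, fileSize) {
  const end = Math.min(rangeStart + partSize, fileSize) - 1; // 'end' is inclusive
  return fs.createReadStream(filePath, { start: rangeStart, end: end });
}
// When using a stream Body, also pass ContentLength in the uploadPart
// params so the SDK knows the part size up front.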
Thanks for the input! I created a similar script based on the async package, including retries, that might be a bit easier for some people to use and understand!
I am looking into your script and wondering if there is a way to resume broken uploads. For example, if my frontend app has uploaded PartNumber: 2 and then the internet connection drops, is it possible to resume the upload from the last uploaded part (i.e. PartNumber: 2) instead of uploading from 0?
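Not the author, but S3 itself supports this as long as you keep the UploadId: listParts tells you which parts S3 already has (with their ETags), so the client can skip those and re-upload only the rest. A rough sketch with the v2 SDK (the helper name is my own):

// Collect the parts S3 already holds for an open multipart upload.
async function getUploadedParts(s3, bucket, key, uploadId) {
  const parts = [];
  let marker = 0;
  let resp;
  do {
    resp = await s3.listParts({
      Bucket: bucket,
      Key: key,
      UploadId: uploadId,
      PartNumberMarker: marker
    }).promise();
    parts.push(...resp.Parts); // each entry has PartNumber and ETag
    marker = resp.NextPartNumberMarker;
  } while (resp.IsTruncated);  // listParts pages at 1,000 parts per call
  return parts;
}
// On resume: seed multipartMap.Parts from this list, decrement the
// remaining-part counter accordingly, and upload only the missing parts.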
Hi! Your solution looks very good, but I don't understand what kind of data I should use. Your data looks like it is an iterable object?
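Not the author, but from the for...of loop it looks like data just needs to be an iterable of part-sized chunks (e.g. Buffers), each at least 5 MB except the last. One way to build that from a file; a sketch, untested:

const fs = require('fs');

// Split a file into 5 MB Buffers suitable for the `data` option above.
function chunkFile(filePath, partSize = 5 * 1024 * 1024) {
  const buffer = fs.readFileSync(filePath);
  const chunks = [];
  for (let start = 0; start < buffer.length; start += partSize) {
    chunks.push(buffer.slice(start, start + partSize));
  }
  return chunks;
}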
@jotta008 Thank you!
Will this work with a file size of 600 MB?
This might help.

const params = {
  ACL: "public-read",
  Key: "sample-videofile",
  ContentType: "video/mp4",
  Body: file,
  ContentLength: file.size,
  Metadata: {
    user_email: "[email protected]",
    user_id: "300",
  },
};
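On the 600 MB question, a quick sanity check against S3's documented limits:

// 600 MB at the 5 MB minimum part size:
// Math.ceil(600 / 5) === 120 parts, well under S3's 10,000-part cap.
// At 5 MB parts the scheme reaches ~48 GB (10,000 x 5 MB); for larger
// objects, raise partSize (max part size 5 GB, max object size 5 TB).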