-
-
Save milesrichardson/db724faf7615f0ea208590a52da2c0eb to your computer and use it in GitHub Desktop.
const AWS = require('aws-sdk'); | |
const fs = require('fs') | |
const s3download = (bucketName, keyName, localDest) => { | |
if (typeof localDest == 'undefined') { | |
localDest = keyName; | |
} | |
let params = { | |
Bucket: bucketName, | |
Key: keyName | |
} | |
let file = fs.createWriteStream(localDest) | |
return new Promise((resolve, reject) => { | |
s3.getObject(params).createReadStream() | |
.on('end', () => { | |
return resolve(); | |
}) | |
.on('error', (error) => { | |
return reject(error); | |
}).pipe(file) | |
}); | |
}; |
thx!
@mostafazh thank you for my eyes 👍
Thanks!
It works fine but in some cases is missing some bytes, the original file is 10.97KB and the downloaded file is 8.30KB why so?
A late answer but the problem is what at first glance seems as only a semantic error.
The Promise
resolves on the end
event of the read stream. This event is triggered when no more data can be read from the read stream. Meaning that the end of the read stream has been reached.
However, this does not necessarily mean, that all the data has already been written to disk in the example.
So the Promise
should actually only resolve()
once
the write stream sends a finish
event.
const s3 = new AWS.S3();
const params = {
Bucket: bucketName,
Key: keyName
};
const readStream = s3.getObject(params).createReadStream();
// Error handling in read stream
readStream.on("error", (e) => {
console.error(e);
reject(e);
});
// Resolve only if we are done writing
writeStream.once('finish', () => {
resolve(filename);
});
// pipe will automatically finish the write stream once done
readStream.pipe(writeStream);
Same code but did some minor cleanups and adds missing import. @milesrichardson
var AWS = require('aws-sdk'); const fs = require('fs'); var s3 = new AWS.S3(); const s3download = (bucketName, keyName, localDest) => { if (typeof localDest == 'undefined') { localDest = keyName; } let params = { Bucket: bucketName, Key: keyName }; let file = fs.createWriteStream(localDest); return new Promise((resolve, reject) => { s3.getObject(params).createReadStream() .on('end', () => { return resolve(); }) .on('error', (error) => { return reject(error); }).pipe(file); }); };
Great solutiion
I think this the solution from @steima is the correct implementation, as for me I was using the 1st solution where the end
event came but does not mean the files were written, my next step which reads the files out, turns to be an incomplete file.
Same code but did some minor cleanups and adds missing import. @milesrichardson
var AWS = require('aws-sdk'); const fs = require('fs'); var s3 = new AWS.S3(); const s3download = (bucketName, keyName, localDest) => { if (typeof localDest == 'undefined') { localDest = keyName; } let params = { Bucket: bucketName, Key: keyName }; let file = fs.createWriteStream(localDest); return new Promise((resolve, reject) => { s3.getObject(params).createReadStream() .on('end', () => { return resolve(); }) .on('error', (error) => { return reject(error); }).pipe(file); }); };
A naive question: Why the resolve and reject is on read stream but not write stream? Thanks!
Same code but did some minor cleanups and adds missing import. @milesrichardson