Skip to content

Instantly share code, notes, and snippets.

View btrdnch's full-sized avatar

nope btrdnch

View GitHub Profile
@btrdnch
btrdnch / [s3-redeploy]-parallel-hashes-computation.js
Created January 9, 2019 10:31
[s3-redeploy]-parallel-hashes-computation
const crypto = require('crypto');
const fs = require('fs');
// Limit of files to be processed simultaneously
const concurrency = 3;
// Promisified hash computation with streams
// NOTE(review): this gist preview is truncated — the function body below is
// incomplete (never returns or closes). Presumably it pipes the stream into
// the hash and resolves with hash.digest(); confirm against the full gist.
function computeSingleFileHash(fileName) {
// Incremental MD5 hasher — used here for content fingerprinting, not security.
const hash = crypto.createHash('md5');
// Read the file as a stream so large files are not buffered fully in memory.
const fileStream = fs.createReadStream(fileName);
@btrdnch
btrdnch / [s3-redeploy]-parallel-files-uploading.js
Last active January 9, 2019 10:26
[s3-redeploy]-parallel-files-uploading
// Limit of promises executed in parallel
// We don't want to upload more than 3 files simultaneously
const concurrency = 3;
// Returns a promise, which means uploading is fired once function is called
// NOTE(review): fragment is truncated by the gist preview — the upload()
// params object and the function are never closed, and `fs`/`s3Client` are
// declared in a sibling snippet. Confirm the remainder against the full gist.
function uploadSingleFileToS3(fileName) {
// Stream the file to S3 so it is not read entirely into memory first.
const fileStream = fs.createReadStream(fileName);
return s3Client.upload({
// The local file name doubles as the S3 object key.
Key: fileName,
Body: fileStream,
@btrdnch
btrdnch / [s3-redeploy]-sequential-files-uploading.js
Last active January 9, 2019 09:48
[s3-redeploy]-sequential-files-uploading
const fs = require('fs');
const aws = require('aws-sdk');
// Shared S3 client; credentials/region come from the AWS SDK's default
// resolution chain (env vars, shared config, instance profile, ...).
const s3Client = new aws.S3();
// Uploads files strictly one at a time: the `await` inside the for...of loop
// forces each upload to finish before the next file's stream is even opened.
// NOTE(review): truncated by the gist preview — the upload() call, the loop
// and the function are never closed here; confirm the tail in the full gist.
async function uploadFilesToS3(fileNames) {
for (const fileName of fileNames) {
// Stream each file so large files are not buffered fully in memory.
const fileStream = fs.createReadStream(fileName);
await s3Client.upload({
Key: fileName,
Body: fileStream,