Created April 18, 2019 23:03
-
-
Save ashwinrayaprolu/def342ad76efc85024c0a5405d91193b to your computer and use it in GitHub Desktop.
Upload folder to AWS, WASABI and other cloud storage providers in NodeJS
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
'use strict';
const AWS = require('aws-sdk');
const path = require("path");
const fs = require('fs');
const sns = new AWS.SNS();

// Set up logging via winston (console + rolling file).
const winston = require('winston');
winston.level = 'debug';
winston.log('debug', 'Now my debug messages are written to console!');
// BUG FIX: the Console transport was previously assigned to a local named
// `console`, shadowing the global console object and breaking any later
// `console.error(...)` call in this file. Renamed both transports.
const fileTransport = new winston.transports.File({ filename: 'logs/combined.log' });
const consoleTransport = new winston.transports.Console();
const logger = winston.createLogger({
  transports: [consoleTransport, fileTransport]
});
logger.info('logging to file and console transports');

// Runtime settings (bucket, prefix, credentials, endpoint, folder to sync).
const config = JSON.parse(fs.readFileSync('config.json', 'utf8'));
function uploadArtifactsToS3() { | |
const s3Path = config.FolderToSync; | |
var accessKeyId = config.accessKeyId; | |
var secretAccessKey = config.secretAccessKey; | |
var endpoint = new AWS.Endpoint(config.endpoint); | |
logger.info(s3Path); | |
var s3 = new AWS.S3({ | |
endpoint: endpoint, | |
accessKeyId: accessKeyId, | |
secretAccessKey: secretAccessKey | |
}); | |
/*** | |
* Few utility funcitons below | |
*/ | |
// Wrap each metadata key/value as an SNS String message attribute:
// { k: v } -> { k: { DataType: 'String', StringValue: v } }.
const getMessageAttributes = function (metaData) {
  return Object.entries(metaData).reduce((attributes, [name, val]) => {
    attributes[name] = {
      DataType: 'String',
      StringValue: val
    };
    return attributes;
  }, {});
};
// Size of a local file in bytes (synchronous stat).
const getFilesizeInBytes = (filename) => fs.statSync(filename).size;
/**
 * Human-readable size string, e.g. 1536 -> "1.5 KB".
 * BUG FIX: the original used Math.round(x, 2); Math.round is unary, so the
 * second argument was silently ignored and values were rounded to whole
 * units. Now rounds to 2 decimal places. Also "0 Byte" is corrected to
 * "0 Bytes" to match the unit labels used for every other value.
 * @param {number} bytes
 * @returns {string}
 */
const bytesToSize = (bytes) => {
  const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
  if (bytes == 0) return '0 Bytes';
  const i = Math.floor(Math.log(bytes) / Math.log(1024));
  return Math.round((bytes / Math.pow(1024, i)) * 100) / 100 + ' ' + sizes[i];
};
/*** | |
* Utility functions till here | |
*/ | |
/**
 * Publish a record to the configured SNS topic.
 * BUG FIX: the original referenced SNS_TOPIC_ARN, an identifier defined
 * nowhere in this file, so any call threw a ReferenceError; the topic ARN is
 * now read from config like every other setting. The original also set
 * MessageStructure: 'string' — per the SNS Publish API the only valid value
 * is 'json' (and only for per-protocol messages), so it is omitted.
 * @param {*} record - payload, JSON-stringified into the message body
 * @param {object} messageAttributes - SNS MessageAttributes map
 * @returns {Promise<object>} the SNS publish response
 */
const sendSns = async (record, messageAttributes) => {
  const params = {
    TopicArn: config.snsTopicArn,
    Message: JSON.stringify(record),
    MessageAttributes: messageAttributes
  };
  return sns.publish(params).promise();
};
// HEAD an object in S3 and return its metadata. Deliberately best-effort:
// a missing key (or any other failure) is logged and yields {}, so callers
// can probe for existence without try/catch of their own.
const fetchS3MetaData = async (params) => {
  try {
    return await s3.headObject(params).promise();
  } catch (error) {
    logger.info(error);
    return {};
  }
};
/**
 * Create a "directory" placeholder in the configured bucket for the given
 * relative path. S3 has no real directories; an empty object whose key ends
 * with '/' serves as one.
 * @param {*} base - relative directory path to create
 */
const createDirectory = async (base) => {
  const resp = await s3.putObject({
    Bucket: config.s3Bucket,
    Key: config.folderPrefix + encodeURIComponent(base) + '/',
  }).promise();
  return resp;
};
/**
 * Upload one file to the configured bucket using multipart upload.
 * BUG FIX: the original passed a node-style callback to s3.upload() AND
 * called .promise() on the result — supplying a callback dispatches the
 * request immediately, and .promise() dispatches it again, double-handling
 * the upload. Only the promise form is used now; errors are logged and
 * rethrown so the caller's catch still fires (as the rejected promise did
 * before).
 * @param {string} bucketPath - bucket-relative path, used for logging
 * @param {object} params - S3 upload params (Bucket/Key/Body)
 * @returns {Promise<object>} the S3 upload response
 */
const uploadFile = async (bucketPath, params) => {
  const options = {
    partSize: 10 * 1024 * 1024, // 10 MB parts
    queueSize: 10 // up to 10 parts in flight
  };
  logger.info('Uploading ' + bucketPath + ' to ' + config.s3Bucket);
  try {
    const resp = await s3.upload(params, options).promise();
    logger.info('Successfully uploaded ' + bucketPath + ' to ' + config.s3Bucket);
    return resp;
  } catch (err) {
    logger.info(err); // an error occurred
    throw err;
  }
};
/**
 * Recursively walk a directory tree, invoking `callback(filePath, stat)` for
 * every regular file. For each subdirectory, a matching S3 "directory"
 * placeholder is created (fire-and-forget) before descending.
 * BUG FIX: createDirectory() returns a promise that was left floating; a
 * failure would surface as an unhandled rejection. It now carries a .catch
 * that logs the error. (The walk itself stays synchronous, as before.)
 * @param {string} currentDirPath - directory to walk
 * @param {function} callback - receives (filePath, fs.Stats) per file
 */
const walkSync = (currentDirPath, callback) => {
  fs.readdirSync(currentDirPath).forEach((name) => {
    const filePath = path.join(currentDirPath, name);
    const stat = fs.statSync(filePath);
    if (stat.isFile()) {
      callback(filePath, stat);
    } else if (stat.isDirectory()) {
      const bucketFilePath = filePath.substring(s3Path.length);
      createDirectory(bucketFilePath).catch((err) => logger.info(err));
      walkSync(filePath, callback);
    }
  });
};
walkSync(s3Path, async (filePath) => { | |
let bucketPath = filePath.substring(s3Path.length); | |
let localFileSize = getFilesizeInBytes(filePath); | |
let params = { | |
Bucket: config.s3Bucket, | |
Key: config.folderPrefix + bucketPath, | |
Body: fs.readFileSync(filePath) | |
}; | |
let headParams = { | |
Bucket: config.s3Bucket, | |
Key: config.folderPrefix + bucketPath | |
}; | |
try { | |
//await s3.putObject(params).promise(); | |
let cloudSize = undefined; | |
try { | |
let headObjectMetaData = await fetchS3MetaData(headParams); | |
//logger.info(Object.entries(headObjectMetaData)); | |
if (typeof headObjectMetaData != "undefined" && typeof headObjectMetaData.ContentLength != "undefined") { | |
cloudSize = headObjectMetaData.ContentLength; | |
} | |
} catch (error) { | |
logger.info(`Error getting metadata for object :${bucketPath}`); | |
} | |
//logger.info("Processing " + bucketPath + " Cloud Size: " + cloudSize + " LocalSize : " + localFileSize) | |
if (typeof cloudSize === 'undefined' || cloudSize != localFileSize) { | |
uploadFile(bucketPath, params); | |
}// Condition to check size | |
} catch (error) { | |
logger.info(error); | |
console.error(`error in uploading ${bucketPath} to s3 bucket`); | |
throw new Error(`error in uploading ${bucketPath} to s3 bucket`); | |
} | |
}); | |
} | |
// Kick off the sync. NOTE(review): the uploads inside are asynchronous and
// are not awaited here, so logger.end() below may run before they finish.
uploadArtifactsToS3();

// Fires once every queued log entry has been flushed to the transports.
logger.on('finish', function (info) {
  // All `info` log messages have now been logged.
});
logger.info('CHILL WINSTON!', { seriously: true });
logger.end();
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.