Upload folder to S3 (Node.js)
const AWS = require("aws-sdk"); // from AWS SDK
const fs = require("fs"); // from Node.js
const path = require("path"); // from Node.js

// configuration
const config = {
  s3BucketName: 'your.s3.bucket.name',
  folderPath: '../dist' // path relative to this script's location
};

// initialize S3 client
const s3 = new AWS.S3({ signatureVersion: 'v4' });

// resolve full folder path
const distFolderPath = path.join(__dirname, config.folderPath);

// get a list of files from the 'dist' directory
fs.readdir(distFolderPath, (err, files) => {
  if (!files || files.length === 0) {
    console.log(`provided folder '${distFolderPath}' is empty or does not exist.`);
    console.log('Make sure your project was compiled!');
    return;
  }

  // for each file in the directory
  for (const fileName of files) {
    // get the full path of the file
    const filePath = path.join(distFolderPath, fileName);

    // ignore directories
    if (fs.lstatSync(filePath).isDirectory()) {
      continue;
    }

    // read file contents
    fs.readFile(filePath, (error, fileContent) => {
      // if unable to read file contents, throw exception
      if (error) { throw error; }

      // upload file to S3; the callback's first argument is the error
      s3.putObject({
        Bucket: config.s3BucketName,
        Key: fileName,
        Body: fileContent
      }, (uploadError) => {
        if (uploadError) { throw uploadError; }
        console.log(`Successfully uploaded '${fileName}'!`);
      });
    });
  }
});
Hi hackhat,
I have used this one, which is more recent, and it has worked perfectly for me:
https://www.npmjs.com/package/s3-node-client
Cheerz ;)
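For reference, this is roughly what uploading a folder with that package looks like. A minimal sketch, assuming s3-node-client keeps the createClient/uploadDir API of the node-s3-client project it forks; the bucket name and local path are placeholders:

const s3 = require('s3-node-client');

const client = s3.createClient({
  s3Options: {
    accessKeyId: process.env.AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  },
});

const uploader = client.uploadDir({
  localDir: './dist', // folder to upload
  s3Params: { Bucket: 'your-bucket', Prefix: '' }, // destination bucket/key prefix
});

uploader.on('error', (err) => console.error('unable to sync:', err.stack));
uploader.on('progress', () => console.log('progress:', uploader.progressAmount, '/', uploader.progressTotal));
uploader.on('end', () => console.log('done uploading'));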
const fs = require('fs');
const path = require('path');
const async = require('async');
const AWS = require('aws-sdk');
const readdir = require('recursive-readdir');

const { BUCKET, KEY, SECRET } = process.env;
const rootFolder = path.resolve(__dirname, './');
const uploadFolder = './upload-folder';

const s3 = new AWS.S3({
  signatureVersion: 'v4',
  accessKeyId: KEY,
  secretAccessKey: SECRET,
});

function getFiles(dirPath) {
  return fs.existsSync(dirPath) ? readdir(dirPath) : [];
}

async function deploy(upload) {
  if (!BUCKET || !KEY || !SECRET) {
    throw new Error('you must provide env. variables: [BUCKET, KEY, SECRET]');
  }

  const filesToUpload = await getFiles(path.resolve(__dirname, upload));

  return new Promise((resolve, reject) => {
    async.eachOfLimit(filesToUpload, 10, async.asyncify(async (file) => {
      const Key = file.replace(`${rootFolder}/`, '');
      console.log(`uploading: [${Key}]`);
      return new Promise((res, rej) => {
        s3.upload({
          Key,
          Bucket: BUCKET,
          Body: fs.readFileSync(file),
        }, (err) => {
          if (err) {
            return rej(err);
          }
          res({ result: true });
        });
      });
    }), (err) => {
      if (err) {
        return reject(err);
      }
      resolve({ result: true });
    });
  });
}

deploy(uploadFolder)
  .then(() => {
    console.log('task complete');
    process.exit(0);
  })
  .catch((err) => {
    console.error(err.message);
    process.exit(1);
  });
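A small tweak worth noting (my suggestion, not part of the original comment): s3.upload also accepts a readable stream as Body, which avoids buffering every file in memory with readFileSync:

return new Promise((res, rej) => {
  s3.upload({
    Key,
    Bucket: BUCKET,
    Body: fs.createReadStream(file), // stream the file instead of buffering it
  }, (err) => (err ? rej(err) : res({ result: true })));
});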
@sarkistlt Thanks for this, it works great.
This is what I use:

const path = require('path')
const util = require('util')
const exec = util.promisify(require('child_process').exec)

const distDir = path.join('someDir', 'dist')
const command = `aws s3 sync ${distDir} s3://bucket-name`

exec(command)
  .then(() => console.log('Deploy complete'))
  .catch(err => {
    console.log(err)
  })
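The same approach takes any of the CLI's sync flags; for example (my addition, with `bucket-name` still a placeholder), `--delete` also removes remote files that no longer exist locally:

const command = `aws s3 sync ${distDir} s3://bucket-name --delete`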
Important notes:
- uploads a directory and its sub-directories recursively;
- `path` could be an absolute or relative path to a directory;
- `params` and `options` are the same as in the AWS documentation, so these functions are very flexible;
- `rootKey` is the root AWS key to use; by default it is the S3 root, e.g. saying `rootKey` is `public/images` and you want to upload `/Users/you/my-project/images`, files will be uploaded to `s3://bucket/public/images`;
- `aws-sdk` will automatically check for the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables; it is the safest way to deal with credentials imo;
- without clustering I found uploading a directory of 1254 files was nearly 2 times faster than the native AWS CLI `sync` method (it's Python underneath; Node.js should be faster);
- don't forget to add the file's content-type, mostly for static websites, or it would be set to `application/octet-stream` by default and lead to unexpected behaviors;
- use your favorite debugger/logger over `console`;
- `const x = { ...params };` is the same as `Object.assign` BUT will not deeply clone objects, which could lead to unexpected object mutations (see the short demo right after this list); prefer a safe clone function or similar;
- tested with Node.js 12.15.0;
- improve this by clustering the whole upload; some extra code/controls will be needed (based on files' length, number of files, available cores, etc.), see the sketch after the full example below.
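To make the shallow-clone caveat above concrete, a throwaway example (hypothetical object, not from the code below):

// `{ ...params }` copies only the top level; nested objects are still shared
const params = { Bucket: 'my-bucket', Metadata: { owner: 'me' } };
const copy = { ...params };
copy.Metadata.owner = 'you';
console.log(params.Metadata.owner); // 'you', the original was mutated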
const { createReadStream, promises: { readdir, stat: getStats } } = require('fs');
const { resolve, join } = require('path');
const S3 = require('aws-sdk/clients/s3');
const { getMIMEType } = require('node-mime-types');

const s3 = new S3({
  signatureVersion: 'v4',
});

// upload file
const uploadFile = async function uploadFile({ path, params, options } = {}) {
  const parameters = { ...params };
  const opts = { ...options };

  try {
    const rstream = createReadStream(resolve(path));

    rstream.once('error', (err) => {
      console.error(`unable to upload file ${path}, ${err.message}`);
    });

    parameters.Body = rstream;
    parameters.ContentType = getMIMEType(path);
    await s3.upload(parameters, opts).promise();

    console.info(`${parameters.Key} (${parameters.ContentType}) uploaded in bucket ${parameters.Bucket}`);
  } catch (e) {
    throw new Error(`unable to upload file ${path} at ${parameters.Key}, ${e.message}`);
  }

  return true;
};

// upload directory and its sub-directories if any
const uploadDirectory = async function uploadDirectory({
  path,
  params,
  options,
  rootKey,
} = {}) {
  const parameters = { ...params };
  const opts = { ...options };
  const root = rootKey && rootKey.constructor === String ? rootKey : '';
  let dirPath;

  try {
    dirPath = resolve(path);
    const dirStats = await getStats(dirPath);

    if (!dirStats.isDirectory()) {
      throw new Error(`${dirPath} is not a directory`);
    }

    console.info(`uploading directory ${dirPath}...`);

    const filenames = await readdir(dirPath);

    if (Array.isArray(filenames)) {
      await Promise.all(filenames.map(async (filename) => {
        const filepath = `${dirPath}/${filename}`;
        const fileStats = await getStats(filepath);

        if (fileStats.isFile()) {
          parameters.Key = join(root, filename);

          await uploadFile({
            path: filepath,
            params: parameters,
            options: opts,
          });
        } else if (fileStats.isDirectory()) {
          await uploadDirectory({
            params,
            options,
            path: filepath,
            rootKey: join(root, filename),
          });
        }
      }));
    }
  } catch (e) {
    throw new Error(`unable to upload directory ${path}, ${e.message}`);
  }

  console.info(`directory ${dirPath} successfully uploaded`);
  return true;
};

// example
(async () => {
  try {
    console.time('s3 upload');

    await uploadDirectory({
      path: '../front/dist',
      params: {
        Bucket: 'my-bucket',
      },
      options: {},
      rootKey: '',
    });

    console.timeEnd('s3 upload');
  } catch (e) {
    console.error(e);
  }
})();
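Picking up the last note above about clustering, one possible shape is sketched below. This is only a sketch under my own assumptions: it reuses `uploadFile` (and the `resolve`/`readdir` imports) from the code above, handles a flat directory only (no recursion), and splits files across one worker per core with Node's built-in `cluster` module; real code would need the extra controls the note mentions:

const cluster = require('cluster');
const { cpus } = require('os');

const WORKERS = cpus().length;

(async () => {
  const dirPath = resolve('../front/dist'); // same example directory as above

  if (cluster.isMaster) {
    // primary process: fork one worker per core
    for (let i = 0; i < WORKERS; i += 1) {
      cluster.fork({ WORKER_INDEX: String(i) });
    }
  } else {
    // worker process: upload every Nth file, offset by the worker's index
    const index = Number(process.env.WORKER_INDEX);
    const filenames = await readdir(dirPath);

    await Promise.all(filenames
      .filter((_, i) => i % WORKERS === index)
      .map((filename) => uploadFile({
        path: `${dirPath}/${filename}`,
        params: { Bucket: 'my-bucket', Key: filename },
      })));

    process.exit(0);
  }
})();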
We've just released a package for that at https://github.com/thousandxyz/s3-lambo, fully tested. You can use the code or the package as you need.
I fixed a few minor errors with paths / other things and published this as a package:
https://www.npmjs.com/package/s3-upload-folder/v/latest
The source (MIT):
https://github.com/Prozi/s3-upload-folder/blob/main/index.js
It uses the standard S3 SDK authentication (read up on this if you don't know what I mean).
Enjoy!
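For anyone unsure what "standard S3 SDK authentication" means: when no keys appear in the code, the `aws-sdk` resolves credentials through its own provider chain, e.g. the `AWS_ACCESS_KEY_ID` / `AWS_SECRET_ACCESS_KEY` environment variables or the shared `~/.aws/credentials` file, so a bare client is enough:

// no credentials in code: the aws-sdk credential chain picks them up
// from the environment or from ~/.aws/credentials
const S3 = require('aws-sdk/clients/s3');
const s3 = new S3({ signatureVersion: 'v4' });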
Added one that works on Windows and keeps the file structure intact: https://gist.github.com/hackhat/cc0adf1317eeedcec52b1a4ff38f738b
The other examples do not work properly on Windows.