Important notes:
- upload a directory and its sub-directories recursively;
- `path` can be an absolute or relative path to a directory;
- `params` and `options` are the same as in the AWS documentation, so these functions are very flexible (see the first snippet after this list);
- `rootKey` is the root AWS key to use; by default it is the S3 root, e.g. if `rootKey` is `public` and you upload `/Users/you/my-project/images`, files will be uploaded to `s3://bucket/public/images`;
- `aws-sdk` will automatically check for the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables; it is the safest way to deal with credentials imo;
- without clustering, I found uploading a directory of 1254 files was nearly 2 times faster than the native AWS CLI `sync` command (it's Python underneath; Node.js should be faster);
- don't forget to set each file's content type, mostly for static websites, or it would default to `application/octet-stream` and lead to unexpected behaviors;
- use your favorite debugger/logger over `console`;
- `const x = { ...params };` is the same as `Object.assign` BUT will not deeply clone objects, which could lead to unexpected object mutations; prefer a safe clone function or similar (a short demo follows this list);
- tested with Node.js 12.15.0;
- improve this by clustering the whole upload; some extra code/controls will be needed (based on file sizes, number of files, available cores, etc.) and a batching sketch is included at the end of this post.
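Since `params` and `options` are passed straight through to `s3.upload`, anything the SDK accepts works. An illustrative call (the bucket name and values are made up; run it from inside an async function):

```javascript
await uploadDirectory({
  path: './images',
  params: {
    Bucket: 'my-bucket',
    ACL: 'public-read', // any upload param can go here
    CacheControl: 'max-age=3600',
  },
  options: {
    partSize: 10 * 1024 * 1024, // multipart part size (10 MB)
    queueSize: 4, // concurrent parts per file
  },
  rootKey: 'public', // keys are prefixed with public/
});
```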
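On the shallow copy caveat: `{ ...params }` only copies top-level properties, so nested objects are still shared with the original. A quick demo (the `Metadata` field is just for illustration):

```javascript
const params = { Bucket: 'my-bucket', Metadata: { owner: 'me' } };

const copy = { ...params };
copy.Metadata.owner = 'someone-else';
console.log(params.Metadata.owner); // 'someone-else': the original was mutated

// a naive but safe deep clone for plain JSON-serialisable objects
const clone = JSON.parse(JSON.stringify(params));
clone.Metadata.owner = 'me-again';
console.log(params.Metadata.owner); // still 'someone-else': the clone did not leak back
```

Now the code itself: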
```javascript
const { createReadStream, promises: { readdir, stat: getStats } } = require('fs');
const { resolve, join } = require('path');
const S3 = require('aws-sdk/clients/s3');
const { getMIMEType } = require('node-mime-types');

const s3 = new S3({
  signatureVersion: 'v4',
});
// upload file
const uploadFile = async function uploadFile({ path, params, options } = {}) {
  const parameters = { ...params };
  const opts = { ...options };

  try {
    const rstream = createReadStream(resolve(path));

    rstream.once('error', (err) => {
      console.error(`unable to upload file ${path}, ${err.message}`);
    });

    parameters.Body = rstream;
    parameters.ContentType = getMIMEType(path);
    await s3.upload(parameters, opts).promise();

    console.info(`${parameters.Key} (${parameters.ContentType}) uploaded in bucket ${parameters.Bucket}`);
  } catch (e) {
    throw new Error(`unable to upload file ${path} at ${parameters.Key}, ${e.message}`);
  }

  return true;
};
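
// e.g. calling uploadFile on its own (hypothetical bucket/key values),
// from inside an async function; note Key must be set by the caller:
// await uploadFile({
//   path: './index.html',
//   params: { Bucket: 'my-bucket', Key: 'index.html' },
//   options: {},
// });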
// upload directory and its sub-directories if any
const uploadDirectory = async function uploadDirectory({
  path,
  params,
  options,
  rootKey,
} = {}) {
  const parameters = { ...params };
  const opts = { ...options };
  const root = rootKey && rootKey.constructor === String ? rootKey : '';
  let dirPath;

  try {
    dirPath = resolve(path);
    const dirStats = await getStats(dirPath);

    if (!dirStats.isDirectory()) {
      throw new Error(`${dirPath} is not a directory`);
    }

    console.info(`uploading directory ${dirPath}...`);

    const filenames = await readdir(dirPath);

    if (Array.isArray(filenames)) {
      await Promise.all(filenames.map(async (filename) => {
        const filepath = `${dirPath}/${filename}`;
        const fileStats = await getStats(filepath);

        if (fileStats.isFile()) {
          parameters.Key = join(root, filename);

          await uploadFile({
            path: filepath,
            params: parameters,
            options: opts,
          });
        } else if (fileStats.isDirectory()) {
          await uploadDirectory({
            params,
            options,
            path: filepath,
            rootKey: join(root, filename),
          });
        }
      }));
    }
  } catch (e) {
    throw new Error(`unable to upload directory ${path}, ${e.message}`);
  }

  console.info(`directory ${dirPath} successfully uploaded`);
  return true;
};
// example
(async () => {
  try {
    console.time('s3 upload');

    await uploadDirectory({
      path: '../front/dist',
      params: {
        Bucket: 'my-bucket',
      },
      options: {},
      rootKey: '',
    });

    console.timeEnd('s3 upload');
  } catch (e) {
    console.error(e);
  }
})();
```
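
About the clustering note above: a full multi-process setup is beyond this post, but a lighter middle ground is to cap concurrency so a huge directory doesn't open thousands of file streams at once. This is plain batching, not real clustering, and it assumes you have first flattened the directory into a list of `{ path, params, options }` descriptors for `uploadFile`:

```javascript
// upload descriptors in fixed-size batches to bound the number of
// open file descriptors and sockets (batchSize is an arbitrary pick)
const uploadInBatches = async function uploadInBatches(files, batchSize = 50) {
  for (let i = 0; i < files.length; i += batchSize) {
    const batch = files.slice(i, i + batchSize);

    // wait for the whole batch before starting the next one
    await Promise.all(batch.map((file) => uploadFile(file)));
  }

  return true;
};
```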