Last active
April 2, 2018 18:33
-
-
Save chadkirby/d6752314aeead8314c0b9efb6d74c873 to your computer and use it in GitHub Desktop.
Node script that recursively copies files from a local directory to an S3 bucket.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env node
// Recursively copy the contents of a local assets directory to an S3 bucket.
const { lstatSync, readdirSync, readFileSync } = require('fs');
const { join, resolve } = require('path');
const ProgressBar = require('progress');
const { coroutine: co } = require('bluebird');

// Destination bucket name.
const Bucket = `xxx`;
// http://github.com/chadkirby/s3-promises
const s3 = require(`s3-promises`)({ Bucket }, { maxRetries: 3 });

// Local directory to copy files out of.
const baseDir = resolve(__dirname, `..`, `assets`);
// Files will be copied from ../assets to xxx/yyy/assets in s3.
const s3Base = `yyy`;
/**
 * Report whether the given path names a directory (per lstat, so a
 * symlink to a directory counts as a non-directory).
 * @param {string} source - path to examine
 * @returns {boolean}
 */
function isDirectory(source) {
  const stats = lstatSync(source);
  return stats.isDirectory();
}
/**
 * Report whether the given path is NOT a directory (file, symlink, etc.,
 * as reported by lstat). Inlines the isDirectory check.
 * @param {string} source - path to examine
 * @returns {boolean}
 */
function isFile(source) {
  return !lstatSync(source).isDirectory();
}
/**
 * List the visible subdirectories of `source`, as full paths.
 * @param {string} source - directory to scan
 * @returns {string[]}
 */
function getDirectories(source) {
  const entries = readDir(source);
  return entries.filter(isDirectory);
}
/**
 * List the visible non-directory entries of `source`, as full paths.
 * @param {string} source - directory to scan
 * @returns {string[]}
 */
function getFiles(source) {
  const entries = readDir(source);
  return entries.filter(isFile);
}
/**
 * List the visible (non-dot) entries of a directory as full paths.
 * @param {string} source - directory to read
 * @returns {string[]} each entry name joined onto `source`
 */
function readDir(source) {
  const visible = readdirSync(source).filter((name) => !name.startsWith('.'));
  return visible.map((name) => join(source, name));
}
/**
 * Recursively upload every visible file under `dir` to s3, mirroring the
 * local layout under the `s3Base` prefix. Meant to run via bluebird's
 * coroutine(); each `yield`ed promise is awaited by co.
 * @param {string} dir - local directory to upload
 */
function* uploadContents(dir) {
  let files = getFiles(dir);
  if (files.length) {
    let bar = new ProgressBar(
      `uploading files in ${dir} [:bar] :rate uploads/sec :etas to go`,
      { total: files.length }
    );
    // Upload all files in this directory in parallel.
    yield Promise.all(files.map((source) => {
      // Map the local path onto the s3 key space:
      // <baseDir>/foo/bar.txt -> <s3Base>/foo/bar.txt
      let Key = source.replace(baseDir, s3Base);
      // Read raw bytes. Decoding as utf-8 (as the original did) would
      // corrupt any binary assets (images, fonts, ...); for utf-8 text
      // files a Buffer body uploads the identical bytes.
      let Body = readFileSync(source);
      // NOTE(review): ContentType is hard-coded; this assumes all assets
      // are text -- confirm, or derive the type from the file extension.
      return s3.uploadAsync({ Key, Body, ContentType: `text/plain` })
        // Don't pass the upload result into tick(): node-progress treats
        // an object argument as a tokens map, not a count.
        .then(() => bar.tick());
    }));
  }
  // Recurse into subdirectories, also in parallel.
  let dirs = getDirectories(dir);
  yield Promise.all(dirs.map(co(uploadContents)));
}
// Entry point: delete everything currently under the s3Base prefix, then
// re-upload the contents of baseDir. Exits non-zero on any failure.
co(function*() {
  try {
    // Delete files before uploading new ones.
    let Objects = yield s3.listAllObjectsAsync({ Prefix: s3Base });
    Objects = Objects.map(({ Key }) => ({ Key }));
    // S3 DeleteObjects accepts at most 1000 keys per request, and the
    // listing may contain more, so delete in batches.
    for (let ii = 0; ii < Objects.length; ii += 1000) {
      yield s3.deleteObjectsAsync({
        Delete: { Objects: Objects.slice(ii, ii + 1000) },
      });
    }
    yield* uploadContents(baseDir);
  } catch (err) {
    // Report on stderr (not stdout) and signal failure to the caller.
    console.error(err);
    process.exit(1);
  }
})();
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment