AWS-SDK upload script, set up specifically for uploading .unityweb files (gzipped WebAssembly files); it sets the correct content type and content encoding for them.
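Usage: install the dependencies from the package.json below, then run npm run deploy (or node AWSDeploy --source <build folder> --dest <remote folder> directly). The deploy script assumes the file is saved as AWSDeploy.js; the --dest flag is optional and defaults to "Build".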
/** You'll need to provide a .env file at the root folder containing an endpoint, bucket name and access credentials
 * # AWS_ENDPOINT=https://ams3.digitaloceanspaces.com
 * # AWS_ACCESS_ID=
 * # AWS_SECRET_KEY=
 * # AWS_BUCKET_NAME=
 */
const dotenv = require("dotenv");
const fs = require("fs");
const path = require("path");
const mime = require("mime");
const async = require("async");
const AWS = require("aws-sdk");
const readdir = require("recursive-readdir");
const argv = require("minimist")(process.argv.slice(2));
const chalk = require("chalk");
const log = console.log;
const info = (...input) => console.log(chalk.blue(...input));
const success = (...input) => console.log(chalk.green(...input));
const error = (...input) => console.log(chalk.bold.red(...input));
const { AWS_ENDPOINT, AWS_ACCESS_ID, AWS_SECRET_KEY, AWS_BUCKET_NAME } = dotenv.config().parsed;
const sourceFolder = path.resolve(__dirname, argv.source || "Builds/webgl/Build");
const destinationFolder = argv.dest || "Build";
const s3 = new AWS.S3({
  endpoint: AWS_ENDPOINT || "https://s3.amazonaws.com",
  signatureVersion: "v4",
  accessKeyId: AWS_ACCESS_ID,
  secretAccessKey: AWS_SECRET_KEY
});
function getFiles(dirPath) {
  return fs.existsSync(dirPath) ? readdir(dirPath) : [];
}
async function deploy(upload) {
  if (!AWS_BUCKET_NAME || !AWS_ACCESS_ID || !AWS_SECRET_KEY) {
    throw new Error("You need to provide all .env variables: [AWS_ACCESS_ID, AWS_SECRET_KEY, AWS_BUCKET_NAME]");
  }
  const filesToUpload = await getFiles(path.resolve(__dirname, upload));
  info(`Uploading ${filesToUpload.length} files...`);
  return new Promise((resolve, reject) => {
    async.eachOfLimit(
      filesToUpload,
      10,
      async.asyncify(async (file) => {
        const Key = file.replace(`${sourceFolder}`, destinationFolder);
        const isUnityWeb = file.endsWith(".unityweb");
        log(`uploading: [${Key}]`);
        return new Promise((res, rej) => {
          s3.upload(
            {
              Key,
              Bucket: AWS_BUCKET_NAME,
              Body: fs.readFileSync(file),
              ACL: "public-read",
              ContentType: isUnityWeb ? "application/octet-stream" : mime.getType(file), // would otherwise default to: "application/vnd.unity"
              ContentEncoding: isUnityWeb ? "gzip" : undefined // gzip is the default compression method for unity, other options are: brotli or disabled
            },
            (err, data) => {
              if (err) {
                return rej(err);
              }
              success(`upload complete: [${Key}]`);
              res({ result: true });
            }
          );
        });
      }),
      (err) => {
        if (err) {
          error(err);
          return reject(err);
        }
        resolve({ result: true });
      }
    );
  });
}
deploy(sourceFolder)
  .then(() => {
    info("Deployment complete");
    process.exit(0);
  })
  .catch((err) => {
    error(err.message);
    process.exit(1);
  });
{
  "name": "aws-deploy",
  "version": "1.0.0",
  "description": "File upload script for use with AWS-SDK",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "deploy": "node AWSDeploy --source Builds/webgl/Build"
  },
  "keywords": [],
  "author": "Bart Wttewaall",
  "license": "ISC",
  "dependencies": {
    "async": "2.6.2",
    "aws-sdk": "2.440.0",
    "chalk": "2.4.2",
    "dotenv": "7.0.0",
    "mime": "2.4.2",
    "minimist": "1.2.0",
    "recursive-readdir": "2.2.2"
  }
}
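To check that the content type and encoding were stored as intended, the object metadata can be read back with the SDK's headObject call. The snippet below is a minimal sketch (not part of the original gist) that reuses the same .env configuration; the object key is a hypothetical example.

const dotenv = require("dotenv");
const AWS = require("aws-sdk");

const { AWS_ENDPOINT, AWS_ACCESS_ID, AWS_SECRET_KEY, AWS_BUCKET_NAME } = dotenv.config().parsed;

const s3 = new AWS.S3({
  endpoint: AWS_ENDPOINT || "https://s3.amazonaws.com",
  signatureVersion: "v4",
  accessKeyId: AWS_ACCESS_ID,
  secretAccessKey: AWS_SECRET_KEY
});

// Hypothetical example key; replace with an object that was actually uploaded
const Key = "Build/Build.wasm.code.unityweb";

// headObject returns the stored metadata without downloading the body
s3.headObject({ Bucket: AWS_BUCKET_NAME, Key }, (err, data) => {
  if (err) throw err;
  console.log(`${Key}: ContentType=${data.ContentType}, ContentEncoding=${data.ContentEncoding}`);
});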