File upload to S3 with Hapi
| { "accessKeyId": "yourkey", "secretAccessKey": "yoursecretkey", "region": "us-east-1" } |
The upload form:

<form id="fileupload" method="POST" action="/image/upload" enctype="multipart/form-data">
  <input type="file" name="files" multiple>
</form>
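The form above does a full-page POST on its own, but since the handler replies in the jQuery File Upload JSON format, an AJAX-style submit is the more likely use. A browser-side sketch, not part of the original gist:

var form = document.getElementById('fileupload');
form.addEventListener('submit', function (e) {
  e.preventDefault(); // stop the full-page POST
  var xhr = new XMLHttpRequest();
  xhr.open('POST', form.action);
  xhr.onload = function () {
    console.log(xhr.responseText); // the handler's JSON reply
  };
  // FormData builds the multipart/form-data body and boundary automatically
  xhr.send(new FormData(form));
});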
The route:

{ method: 'POST', path: '/image/upload', config: { handler: uploadHandler.upload } },
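For context, a minimal sketch of wiring this route into a server, assuming the pre-8.x Hapi API this gist targets (where Hapi.createServer and request.reply still exist); host and port are placeholders, and options is the server options object defined just below:

var Hapi = require('hapi');
var uploadHandler = require('./uploadHandler'); // the handler module below

var server = Hapi.createServer('0.0.0.0', 8080, options);
server.route({
  method: 'POST',
  path: '/image/upload',
  config: { handler: uploadHandler.upload }
});
server.start(function () {
  console.log('Server running on port 8080');
});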
Server options:

// Handlebars templating engine, CORS, multipart file uploads
// written to ./uploads, and request timeouts
var options = {
  views: {
    path: 'templates',
    engines: {
      html: 'handlebars'
    }
  },
  cors: true,
  payload: {
    multipart: {
      mode: "file",           // write uploads to disk rather than buffering
      uploadDir: "./uploads"
    },
    maxBytes: 10048576        // max request payload, roughly 10 MB
  },
  timeout: {
    client: '500000',
    socket: '600000'
  }
};
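With the server wired up as sketched above, the route can be smoke-tested without a browser. A hedged sketch using only Node core modules; the port and the local test image are placeholders:

var http = require('http');
var fs = require('fs');

var boundary = '----hapiUploadTest';
var file = fs.readFileSync('./test.jpg'); // hypothetical local test image

// Build a multipart/form-data body by hand: one part named "files",
// matching the form field the handler reads.
var head = Buffer.from(
  '--' + boundary + '\r\n' +
  'Content-Disposition: form-data; name="files"; filename="test.jpg"\r\n' +
  'Content-Type: image/jpeg\r\n' +
  '\r\n'
);
var tail = Buffer.from('\r\n--' + boundary + '--\r\n');
var body = Buffer.concat([head, file, tail]);

var req = http.request({
  method: 'POST',
  host: 'localhost',
  port: 8080,
  path: '/image/upload',
  headers: {
    'Content-Type': 'multipart/form-data; boundary=' + boundary,
    'Content-Length': body.length
  }
}, function (res) {
  res.pipe(process.stdout); // prints the handler's JSON reply
});
req.end(body);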
The upload handler (required as uploadHandler by the route above):

// Filesystem
var fs = require('fs');
// ImageMagick for image manipulation
var im = require('imagemagick');
// AWS SDK
var AWS = require('aws-sdk');
AWS.config.loadFromPath('./aws_config.json');
// UUID
var uuid = require('node-uuid');
// Moment for date/time
var moment = require('moment');

exports.upload = function (request) {
  // Check if this is a POST (payload present) or a GET
  if (request.payload) {
    // The uploaded file object
    var f = request.payload.files;
    // Path of the uploaded file on disk
    var path = f.path;
    // Original file name of the upload
    var imageName = f.originalFilename;
    // Path/file for the thumbnail
    var thumbPath = __dirname + "/uploads/thumbs/" + imageName;
    // Read in the uploaded file
    fs.readFile(path, function (err, data) {
      // Log an error if no file name was found
      if (!imageName) {
        console.log("There was an error: no file name found");
      } else {
        // Write the buffer back to disk, then use ImageMagick to
        // copy/resize it into a new thumbnail
        fs.writeFile(path, data, function (err) {
          im.resize({
            srcPath: path,
            dstPath: thumbPath,
            width: 80
          }, function (err, stdout, stderr) {
            if (err) throw err;
            console.log('Resized');
            // Upload the thumbnail to the S3 bucket
            var s3 = new AWS.S3();
            fs.readFile(thumbPath, function (err, thumb_buffer) {
              var params = {
                Bucket: 'yourbucketname',
                Key: 'thumb_' + f.originalFilename,
                Body: thumb_buffer,
                ACL: 'public-read'
              };
              s3.putObject(params, function (perr, pres) {
                if (perr) {
                  console.log("Error uploading data: ", perr);
                } else {
                  console.log("Successfully uploaded thumbnail: " + params.Key);
                  // Delete the thumbnail file from the server
                  fs.unlink(thumbPath, function (err) {
                    if (err) throw err;
                    console.log('Successfully deleted ' + thumbPath);
                  });
                }
              });
            });
          });
        });
      }
    });
    // Upload the original file to the S3 bucket (this runs in parallel
    // with the thumbnail flow above)
    var s3 = new AWS.S3();
    fs.readFile(path, function (err, file_buffer) {
      var params = {
        Bucket: 'yourbucketname',
        Key: f.originalFilename,
        Body: file_buffer,
        ACL: 'public-read'
      };
      s3.putObject(params, function (perr, pres) {
        if (perr) {
          console.log("Error uploading data: ", perr);
        } else {
          console.log("Successfully uploaded: " + params.Key);
          // Delete the original file from the server
          fs.unlink(path, function (err) {
            if (err) throw err;
            console.log('Successfully deleted ' + path);
          });
          // JSON reply for the jQuery File Upload plugin
          request.reply('{"files": [{ "name": "' + f.originalFilename + '", "size": ' + f.size + ', "url": "https://s3.amazonaws.com/yourbucketname/' + f.originalFilename + '", "thumbnailUrl": "https://s3.amazonaws.com/yourbucketname/thumb_' + f.originalFilename + '", "deleteUrl": "https://s3.amazonaws.com/yourbucketname/' + f.originalFilename + '", "deleteType": "DELETE"}]}');
        }
      });
    });
  } else {
    // GET request reply
    request.reply("Ready");
  }
};
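One readability note on the reply above: the JSON is assembled by string concatenation, which breaks if a file name contains a quote. The same payload could be built with JSON.stringify; a small sketch, with a sample object standing in for the handler's f:

var f = { originalFilename: 'photo.jpg', size: 12345 }; // stand-in for the parsed upload
var base = 'https://s3.amazonaws.com/yourbucketname/';
var reply = JSON.stringify({
  files: [{
    name: f.originalFilename,
    size: f.size,
    url: base + f.originalFilename,
    thumbnailUrl: base + 'thumb_' + f.originalFilename,
    deleteUrl: base + f.originalFilename,
    deleteType: 'DELETE'
  }]
});
console.log(reply); // pass this to request.reply(...) in the handler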
Comment: great, thanks! I've resolved the issue, but I used Hapi's own options:
var serverOptions = {
  payload: { multipart: 'file' },
  cors: {
    credentials: true,
    additionalHeaders: ['X-Requested-With']
  }
};
and the route (POST):
payload: 'parse'
and then nodejs-imager, which helps upload to S3 in a few lines:
imager.upload(req.payload.file.path, function (err, cdnUri, files) {
  if (err) return req.reply({ code: 200, message: err.message });
  if (files.length) {
    var avatar = { cdnUri: cdnUri, files: files };
  }
  req.reply({ code: 200, image: avatar });
}, 'movie');
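Pulling the commenter's pieces together, the whole setup might look roughly like this; the port, path, and the exact require name for nodejs-imager are assumptions, and the pre-8.x Hapi API is assumed as above:

var Hapi = require('hapi');
var imager = require('imager'); // assumed require for the nodejs-imager module

var serverOptions = {
  payload: { multipart: 'file' },
  cors: { credentials: true, additionalHeaders: ['X-Requested-With'] }
};
var server = Hapi.createServer('0.0.0.0', 8081, serverOptions);

server.route({
  method: 'POST',
  path: '/avatar/upload', // hypothetical path
  config: {
    payload: 'parse', // parse the multipart body before the handler runs
    handler: function (req) {
      // imager.upload as used in the comment above; 'movie' is the
      // commenter's preset/variant name
      imager.upload(req.payload.file.path, function (err, cdnUri, files) {
        if (err) return req.reply({ code: 200, message: err.message });
        var avatar = files.length ? { cdnUri: cdnUri, files: files } : null;
        req.reply({ code: 200, image: avatar });
      }, 'movie');
    }
  }
});
server.start();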
Note that you can't stream directly to an S3 upload here; you need to write to disk and then delete those files (which the upload function does). I also create a thumbnail as well as the original size, and both get uploaded to my S3 bucket.
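One aside on that streaming point: the putObject call used here needs a full buffer, which is why the write-to-disk step exists. Later aws-sdk v2 releases added s3.upload, which does accept a readable stream; a hedged sketch with placeholder names:

var fs = require('fs');
var AWS = require('aws-sdk');
var s3 = new AWS.S3();

// s3.upload (unlike putObject) handles a stream of unknown length by
// running a managed multipart upload under the hood.
s3.upload({
  Bucket: 'yourbucketname',
  Key: 'photo.jpg', // hypothetical key
  Body: fs.createReadStream('./uploads/photo.jpg'),
  ACL: 'public-read'
}, function (err, data) {
  if (err) return console.log('Upload failed:', err);
  console.log('Uploaded to', data.Location);
});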