var AWS = require('aws-sdk'),
    fs = require('fs');

// For dev purposes only -- never commit real credentials
AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });

// Read in the file and upload it to S3
fs.readFile('del.txt', function (err, data) {
  if (err) { throw err; }
  var s3 = new AWS.S3();
  // In current aws-sdk versions putObject lives on the S3 object itself
  // (not on s3.client), and the callback receives (err, data)
  s3.putObject({
    Bucket: 'banners-adxs',
    Key: 'del2.txt',
    Body: data, // fs.readFile already returns a Buffer; no conversion needed
    ACL: 'public-read'
  }, function (err, data) {
    if (err) { throw err; }
    console.log(data);
    console.log('Successfully uploaded package.');
  });
});
@chandankrishnan I've modified this gist to work with an image. See https://gist.github.com/SylarRuby/b60eea29c1682519e422476cc5357b60
@almostprogrammer, @SylarRuby, @chandankrishnan
I've created a gist that works with any kind of file and also with chunked data.
Here is the link: https://gist.github.com/sarfarazansari/59d5cf4bb3b03acf069396ca92a79b3e
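For reference, s3.upload in aws-sdk v2 also handles chunking natively via its multipart options; a minimal sketch (bucket, key, and file names are placeholders):

var AWS = require('aws-sdk');
var fs = require('fs');

var s3 = new AWS.S3();

// partSize and queueSize control the chunk size and parallelism of the
// multipart upload that s3.upload performs under the hood
s3.upload(
  { Bucket: 'my-bucket', Key: 'big-file.zip', Body: fs.createReadStream('./big-file.zip') },
  { partSize: 10 * 1024 * 1024, queueSize: 4 },
  function (err, result) {
    if (err) { throw err; }
    console.log('Uploaded to', result.Location);
  }
);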
You should never put accessKey and secretKey in your code. Don't ask me why I know that.
Simple solution:

const AWS = require('aws-sdk');
const fs = require('fs');

// Load credentials from a JSON file instead of hard-coding them
AWS.config.loadFromPath('./config/configS3.json');

const s3Bucket = new AWS.S3({
  params: { Bucket: 'MY_BUCKET_NAME' },
  httpOptions: { timeout: 6000000 }
});

fs.readFile('./files/filename.png', function (err, fileData) {
  if (err) { return callback(err); }
  const params = {
    ACL: 'public-read',
    Key: 'filename.png',
    Body: fileData,
    ContentType: 'image/png'
  };
  s3Bucket.putObject(params, (error, data) => {
    if (error) { return callback(error); }
    callback(null, data); // `callback` is assumed to be in scope
  });
});
The Right Way (that actually works)
I'm a senior JS dev and I thought I'd just do a quick search to find a copy/paste snippet for uploading to S3.
What I've found instead (collectively, not specifically this example) has horrified me, so I decided to write my own and share:
First, I recommend using a .env file for the configuration details (and never commit that file):

.env:
AWS_ACCESS_KEY=xxxxxxxxxxxxxxxx
AWS_SECRET_ACCESS_KEY=xxxxxxxxxxxxxxxx
.gitignore:
.env
.env.*
And this is the actual code snippet:
'use strict';
// This will read the .env (if it exists) into process.env
require('dotenv').config();
// These values will be either what's in .env,
// or what's in the Docker, Heroku, AWS environment
var AWS_ACCESS_KEY = process.env.AWS_ACCESS_KEY;
var AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
var AWS = require('aws-sdk');
var s3 = new AWS.S3({
accessKeyId: AWS_ACCESS_KEY,
secretAccessKey: AWS_SECRET_ACCESS_KEY
});
var fs = require('fs');
var path = require('path');
function uploadToS3(bucketName, keyPrefix, filePath) {
// ex: /path/to/my-picture.png becomes my-picture.png
var fileName = path.basename(filePath);
var fileStream = fs.createReadStream(filePath);
// If you want to save to "my-bucket/{prefix}/{filename}"
// ex: "my-bucket/my-pictures-folder/my-picture.png"
var keyName = path.join(keyPrefix, fileName);
// We wrap this in a promise so that we can handle a fileStream error
// since it can happen *before* s3 actually reads the first 'data' event
return new Promise(function(resolve, reject) {
fileStream.once('error', reject);
s3.upload(
{
Bucket: bucketName,
Key: keyName,
Body: fileStream
}
).promise().then(resolve, reject);
});
}
Usage:
uploadToS3("my-bucket-name", "", "../../be/careful/pic.jpg").then(function (result) {
console.log("Uploaded to s3:", result.location);
}).catch(function (err) {
console.error("something bad happened:", err.toString());
});
See https://coolaj86.com/articles/upload-to-s3-with-node-the-right-way/ if you've tried and failed a few times with what you've found among the top google results and want to know why pretty much none of them work.
Of course, you could use lots of const, await, hashrockets, and arrow functions as well, but I prefer plain JS because it's easy even for novices from other languages to read and understand.
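For comparison, a sketch of the same helper using const, async/await, and arrow functions; it reuses the s3, fs, and path variables from the snippet above and keeps the same behavior and caveats as uploadToS3:

const uploadToS3Async = async (bucketName, keyPrefix, filePath) => {
  const fileName = path.basename(filePath);
  const fileStream = fs.createReadStream(filePath);
  const keyName = path.join(keyPrefix, fileName);

  // Same trick as above: reject early if the stream errors
  // before S3 reads the first 'data' event
  return await new Promise((resolve, reject) => {
    fileStream.once('error', reject);
    s3.upload({ Bucket: bucketName, Key: keyName, Body: fileStream })
      .promise()
      .then(resolve, reject);
  });
};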
How do I get the public URL once the file is uploaded?
public async uploadObject(
body: Buffer,
bucket: string,
key: string,
mimeType: string
) {
const params = {
Bucket: bucket,
Key: key,
ACL: 'public-read',
Body: body,
ContentType: mimeType
};
return await this.s3
.upload(params)
.promise()
.then((data) => data.Location) // <-- the public URL is available under data.Location
.catch((err) => {
throw new Error(err.message);
});
}
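A hypothetical usage sketch (the uploader instance, bucket, key, and file names are placeholders; uploader stands for an instance of whatever class defines uploadObject):

const fs = require('fs');

async function example(uploader) {
  const body = fs.readFileSync('./avatar.png');
  // uploadObject resolves with data.Location, i.e. the public URL
  const url = await uploader.uploadObject(body, 'my-bucket', 'avatars/avatar.png', 'image/png');
  console.log('Public URL:', url);
}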
@solderjs Thank you for your kind sharing, which is very helpful for junior devs.
In:
var fileName = path.basename(filePath);
Where do you get the path from?
The path is a library, imported with const path = require('path'). Or, if you are asking about the actual path: just pass the path to your file manually, or use a variable that references it.
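For example (the file path here is only an illustration):

var path = require('path');

var filePath = '/tmp/uploads/my-picture.png'; // any path to a local file
var fileName = path.basename(filePath);       // 'my-picture.png'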
What about uploading via a URL? I'm trying to figure out how to pass an image URL from the web and upload that image to S3. Many free image API libraries require this (e.g. pixabay).
@evolross You can either open up an http request and download it as a file first, or pipe it.
Honestly, I think it's to your benefit to just do this with the bare node https API via pipes. If you want something quick and dirty for downloads, @root/request will get the job done. Or axios. Or whatever else people are using these days. They don't much matter. I rolled my own to be lightweight, request-compatible, and to have 0 dependencies.
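A minimal sketch of the piping approach with aws-sdk v2 (the URL, bucket, and key are placeholders): the https response stream goes straight in as the upload Body, so nothing touches the filesystem.

'use strict';

var https = require('https');
var AWS = require('aws-sdk');

var s3 = new AWS.S3();

function uploadFromUrl(url, bucketName, keyName) {
  return new Promise(function (resolve, reject) {
    https.get(url, function (res) {
      if (res.statusCode !== 200) {
        reject(new Error('Unexpected status code: ' + res.statusCode));
        return;
      }
      // s3.upload accepts a readable stream as Body
      s3.upload(
        { Bucket: bucketName, Key: keyName, Body: res, ContentType: res.headers['content-type'] },
        function (err, result) {
          if (err) { reject(err); return; }
          resolve(result.Location);
        }
      );
    }).on('error', reject);
  });
}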
Thanks solderjs, using a fileStream worked perfectly for me.
TypeScript snippet:
async putFileInBucket(awsS3: AWS.S3, bucketName: string, fileName: string, fileExtension: string, fileStream: fs.ReadStream): Promise<void> {
await awsS3.putObject({
Body: fileStream,
Bucket: bucketName,
Key: `${fileName}.${fileExtension}`
}).promise()
.catch((err: AWSError) => {
throw new AWSException(err, "problem uploading file")
})
}
const fileStream = fs.createReadStream(path.join(__dirname, "sample.wav"))
await this.putFileInBucket(awsS3, bucketName, "myFile", "wav", fileStream)
Hi team,
I need to download PDF files from a website, zip them, and upload the zip file to an S3 bucket, using a Lambda function with Node.js. Can someone help me with this?
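One way to approach this: stream each PDF into a zip archive and pipe the archive straight to s3.upload. A rough sketch, assuming the archiver package and aws-sdk v2 (the URLs, bucket, and key names are placeholders):

'use strict';

var https = require('https');
var stream = require('stream');
var archiver = require('archiver'); // npm install archiver
var AWS = require('aws-sdk');

var s3 = new AWS.S3();

function zipPdfsToS3(pdfUrls, bucketName, keyName) {
  return new Promise(function (resolve, reject) {
    var archive = archiver('zip');
    var pass = new stream.PassThrough();
    var pending = pdfUrls.length;

    archive.on('error', reject);
    archive.pipe(pass);

    // Start the upload right away; S3 consumes the zip as it is produced
    s3.upload(
      { Bucket: bucketName, Key: keyName, Body: pass, ContentType: 'application/zip' },
      function (err, result) {
        if (err) { reject(err); return; }
        resolve(result.Location);
      }
    );

    // Append each remote PDF to the archive as it downloads
    pdfUrls.forEach(function (url, i) {
      https.get(url, function (res) {
        archive.append(res, { name: 'document-' + i + '.pdf' });
        pending -= 1;
        // finalize() once every entry has been appended
        if (pending === 0) { archive.finalize(); }
      }).on('error', reject);
    });
  });
}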
it works for me :)
uploadContentFromFilePath = (fileName) => {
  const fileContent = fs.createReadStream(fileName);
  return new Promise(function (resolve, reject) {
    fileContent.once('error', reject);
    s3.upload(
      {
        Bucket: 'test-bucket',
        Key: `${fileName}_${Date.now().toString()}`,
        ContentType: 'application/pdf',
        ACL: 'public-read',
        Body: fileContent
      },
      function (err, result) {
        if (err) {
          reject(err);
          return;
        }
        resolve(result.Location);
      }
    );
  });
}
If you get the error 'putObject is not defined', you can write it like this:
var s3 = new AWS.S3();
s3.putObject({
  Bucket: 'xxx',
  Key: 'xxx',
  Body: 'what you want to upload'
}, function (err, data) {
  if (err) { throw err; }
  console.log('Successfully uploaded package.');
});
Thank you, everybody, for your comments 🙌
@coolaj86 Thank you for your snippet, I just wonder how to use it with a file input form?
Thanks
By far the best simple and straightforward code/implementation and brief explanation I've found. Been stuck on this for 2-3 days lol Thanks guys - peace and love
How can I create an S3 bucket and get its ID?
When I hit an image URL from the S3 bucket, the image doesn't show; it shows an error like "Could not load image 'images.png'".
Any help would be highly appreciated.
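That error usually means the object is not publicly readable. Either upload with ACL: 'public-read' (as in the snippets above), or generate a pre-signed URL. A minimal sketch with aws-sdk v2 (bucket and key are placeholders):

var AWS = require('aws-sdk');
var s3 = new AWS.S3();

// A time-limited URL that works even when the object itself is private
var signedUrl = s3.getSignedUrl('getObject', {
  Bucket: 'my-bucket',
  Key: 'images.png',
  Expires: 60 * 60 // seconds until the link expires
});
console.log(signedUrl);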