Lambda function for Imagemin to optimize JPEGs and PNGs
/*
  Copies the original file to the originals/ folder in the same bucket.
  Optimizes the jpeg/png and writes it back to the same path.
  If the optimized jpeg/png somehow comes out larger than the original, the optimized write is skipped.
  A webp version of the optimized jpeg/png is created and written to the webps/ folder in the same bucket.
  If the webp version is larger than the optimized jpeg/png, the webp write is skipped.
  For preparing imagemin binaries for the Lambda runtime environment,
  see https://hub.docker.com/r/rajatbarman/imagemin-lambda
  After optimization it removes all the s3 buckets in your AWS account. (Joking)
*/
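/*
  For illustration only (not part of the original gist): with placeholder names like
  my-bucket and photos/cat.jpg, a single upload would roughly end up as

    my-bucket/originals/photos/cat.jpg   untouched copy of the upload
    my-bucket/photos/cat.jpg             optimized in place (skipped if larger than the original)
    my-bucket/webps/photos/cat.webp      webp version (skipped if larger than the optimized jpeg/png)
*/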
const AWS = require('aws-sdk');
const path = require('path');
const util = require('util');
const imagemin = require('imagemin');
const imageminPngquant = require('imagemin-pngquant');
const imageminJpegoptim = require('imagemin-jpegoptim');
const imageminWebp = require('imagemin-webp');

const s3 = new AWS.S3();
/* Writes the object to s3 and passes the optimized buffer along to the next step in the chain */
function putObject(params, optimizedImageBuffer) {
  return s3.putObject(params).promise()
    .then((response) => Object.assign(response, { optimizedImageBuffer }));
}

/* Swaps the file extension for .webp, e.g. photos/cat.jpg -> photos/cat.webp */
function getWebpFileName(objectKey) {
  const split = objectKey.split('.');
  return `${split.slice(0, split.length - 1).join('.')}.webp`;
}
/* Picks the imagemin plugins for the object's ContentType and resolves with both the
   optimized buffer and the original getObject response */
function optimizeImageBuffer(response) {
  if (!response) {
    return;
  }
  const plugins = {
    'image/jpeg': [imageminJpegoptim({ max: 80 })],
    'image/png': [imageminPngquant({ quality: [0.8, 0.9] })]
  }[response.ContentType];
  return imagemin.buffer(response.Body, { plugins })
    .then((optimizedImageBuffer) => ({ optimizedImageBuffer, response }));
}

/* Creates a webp version of the optimized buffer and resolves with both buffers
   so their sizes can be compared */
function getWebpImageBuffer(optimizedImageBuffer) {
  if (!optimizedImageBuffer) {
    return;
  }
  return imagemin.buffer(optimizedImageBuffer, {
    plugins: [imageminWebp()]
  })
    .then((webpImageBuffer) => ({ webpImageBuffer, optimizedImageBuffer }));
}
function processObject(bucket, objectKey) {
  const absoluteImagePath = `${bucket}/${objectKey}`;
  if (objectKey.indexOf('originals/') !== -1) {
    /*
      The function is triggered from the originals folder
      Don't do anything
    */
    return;
  }
  console.log('Input: ', absoluteImagePath);
  /* Copy s3 object to originals folder */
  const fetchMetadataParams = {
    Bucket: bucket,
    Key: objectKey,
  };
  const copyParams = {
    Bucket: bucket,
    CopySource: absoluteImagePath,
    Key: `originals/${objectKey}`
  };
  return s3.headObject(fetchMetadataParams).promise()
    .then((response) => {
      if (response.Metadata && response.Metadata.is_processed) {
        return Promise.reject('Already processed');
      }
      return s3.copyObject(copyParams).promise();
    })
    .then(() => {
      console.log('Progress: Copied object to originals folder', absoluteImagePath);
      return s3
        .getObject({ Bucket: bucket, Key: objectKey })
        .promise();
    })
    .then((response) => {
      console.log('Progress: Got the s3 object', absoluteImagePath);
      return optimizeImageBuffer(response);
    })
    .then(({ optimizedImageBuffer, response }) => {
      const optimizedImageLength = Buffer.from(optimizedImageBuffer).length;
      const originalImageLength = Buffer.from(response.Body).length;
      if (optimizedImageLength >= originalImageLength) {
        console.log(
          'Error: Optimized Image size is greater than original image size',
          absoluteImagePath,
          optimizedImageLength,
          originalImageLength
        );
        /* Resolving promise to let the webp process happen */
        return Promise.resolve({ optimizedImageBuffer, error: true });
      }
      console.log('Progress: Optimized Image', absoluteImagePath);
      return putObject({
        Metadata: {
          is_processed: 'true',
        },
        Bucket: bucket,
        Key: objectKey,
        Body: optimizedImageBuffer,
        ContentType: response.ContentType,
      }, optimizedImageBuffer);
    })
    .then(({ optimizedImageBuffer, error }) => {
      if (!error) {
        console.log('Success: Successfully wrote optimized image in s3', absoluteImagePath);
      }
      return getWebpImageBuffer(optimizedImageBuffer);
    })
    .then(({ webpImageBuffer, optimizedImageBuffer }) => {
      const optimizedImageLength = Buffer.from(optimizedImageBuffer).length;
      const webpImageLength = Buffer.from(webpImageBuffer).length;
      if (webpImageLength >= optimizedImageLength) {
        return Promise.reject('Webp image length is greater than jpeg/png length, not writing webp.');
      }
      console.log('Progress: Created Webp Buffer', absoluteImagePath);
      return s3.putObject({
        Bucket: bucket,
        Key: `webps/${getWebpFileName(objectKey)}`,
        Body: webpImageBuffer,
        ContentType: 'image/webp',
      }).promise();
    })
    .then(() => {
      console.log('Success: Successfully wrote webp image in s3', absoluteImagePath);
    })
    .catch((err) => {
      // Any failure in the chain (already processed, copy, optimize, or webp write) lands here
      console.log('Error: ', absoluteImagePath, err);
    });
}
module.exports.optimizeImage = event => {
  console.log('Reading options from event:\n', util.inspect(event, { depth: 5 }));
  // Return a promise so the Lambda invocation waits for every record to finish processing
  return Promise.all(event.Records.map(record => {
    const { s3: s3Obj } = record;
    const bucket = s3Obj.bucket.name;
    const objectKey = decodeURIComponent(s3Obj.object.key.replace(/\+/g, ' '));
    return processObject(bucket, objectKey);
  }));
};
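A minimal local-invocation sketch (not part of the original gist), assuming the code above is saved as handler.js and that AWS credentials plus a real bucket and object are available. The bucket and key names below are placeholders, and the event only mimics the fields the handler actually reads (s3.bucket.name and s3.object.key), not a full S3 notification payload.

// local-test.js -- illustrative only; 'my-bucket' and 'photos/cat.jpg' are placeholders
const { optimizeImage } = require('./handler');

const fakeEvent = {
  Records: [
    {
      s3: {
        bucket: { name: 'my-bucket' },
        object: { key: 'photos/cat.jpg' }, // real notifications deliver keys URL-encoded
      },
    },
  ],
};

optimizeImage(fakeEvent)
  .then(() => console.log('Done processing test event'))
  .catch(console.error);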