Created
January 24, 2023 14:25
-
-
Save allenheltondev/24cf766f2cf065cbedcbea16df850ba9 to your computer and use it in GitHub Desktop.
Update locally referenced images in your blog posts with cloud references in S3
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// This script will scan all files in a given directory for locally referenced images. It will take the images, rename them with a standard convention, then upload them to S3 with a 'public-read' ACL. The script will then update the reference to the local image with the link to the version in the cloud.
// If there are any issues during operation, the script will output either a 'skipped-posts.json' file that lists the posts that were not processed or a 'failed-image-uploads.json' file that lists images that failed to upload into S3.
// Arguments for operation
//
//   [0] blogPostDirectory - relative path from the root where all blog posts live
//   [1] imageDirectory - relative path from the root where all images live
//   [2] bucketName - name of the S3 bucket to upload the local images to
//   [3] awsProfileName - name of the aws profile on your machine that has access to the S3 bucket (optional, defaults to 'default')
//
// Example invocation
//
//   node replaceImages.js 'content/blog' static myimagebucket sandbox
//
// CLI arguments — see the header comment for an example invocation.
const blogPostDirectory = process.argv[2]; // e.g. content/blog
const imageDirectory = process.argv[3];    // e.g. static
const bucketName = process.argv[4];
const awsProfileName = process.argv[5] ?? 'default'; // optional; falls back to the default AWS profile

// Fail fast with usage help when a required argument is missing; without
// this, a missing bucket name would silently produce 'undefined' S3 URLs.
if (!blogPostDirectory || !imageDirectory || !bucketName) {
  console.error('Usage: node replaceImages.js <blogPostDirectory> <imageDirectory> <bucketName> [awsProfileName]');
  process.exit(1);
}

const fs = require('fs');
const frontmatter = require('@github-docs/frontmatter');
const AWS = require('aws-sdk');
const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');
// Scans every post in blogPostDirectory, uploads locally referenced images
// (the frontmatter featured image and any inline ../images/post/ markdown
// links) to S3, rewrites the references to the S3 URLs, then deletes the
// local image files. Skipped posts and failed uploads are reported via
// skipped-posts.json / failed-image-uploads.json.
async function run() {
  // aws-sdk v2 resolves the named profile synchronously, and the resulting
  // static { accessKeyId, secretAccessKey } object is accepted by the v3
  // client. NOTE(review): fromIni() from @aws-sdk/credential-providers
  // would remove the v2 dependency entirely — confirm before swapping.
  const credentials = new AWS.SharedIniFileCredentials({ profile: awsProfileName });
  const s3 = new S3Client({ credentials: credentials });

  const imagesToRemove = [];
  const skippedPosts = [];
  const failedImageUploads = [];

  // Upload one local file to S3 under the given key with a public-read ACL.
  // Returns true on success. AWS SDK v3 clients THROW on failed requests
  // (they never resolve without an ETag), so without this try/catch a
  // single bad upload would crash the whole run instead of being recorded
  // in failed-image-uploads.json.
  async function uploadToS3(localPath, key) {
    try {
      const response = await s3.send(new PutObjectCommand({
        Bucket: bucketName,
        Key: key,
        ACL: 'public-read',
        Body: fs.readFileSync(localPath)
      }));
      return Boolean(response.ETag);
    } catch (err) {
      console.error(`Failed to upload ${localPath} as ${key}: ${err.message}`);
      return false;
    }
  }

  const posts = fs.readdirSync(blogPostDirectory);
  for (const post of posts) {
    console.log(`Processing blog post: ${post}`);
    let data = fs.readFileSync(`${blogPostDirectory}/${post}`, 'utf8');
    const metadata = frontmatter(data);
    if (!metadata?.data?.title) {
      console.log(`No title found for post: ${post}. Skipping replacement.`);
      skippedPosts.push(post);
      continue;
    }

    // S3 key prefix from the title: lowercase, spaces -> underscores, then
    // strip remaining special characters.
    const prefix = metadata.data.title.toLowerCase().replace(/ /g, '_').replace(/[^\w\s]/gi, '').trim();

    // Featured image from frontmatter, when it is a local reference.
    if (!metadata?.data?.image) {
      console.log(`${post} has no featured image.`);
    }
    else if (metadata.data.image.startsWith('images/')) {
      const pieces = metadata.data.image.split('/');
      const imageFileName = pieces[pieces.length - 1];
      // .pop() takes the segment after the LAST dot, so multi-dot names
      // like 'my.photo.png' yield 'png' (split('.')[1] would yield 'photo').
      const extension = imageFileName.split('.').pop();
      const originalImage = `./${imageDirectory}/${metadata.data.image}`;
      const key = `${prefix}_feature.${extension}`;
      if (await uploadToS3(originalImage, key)) {
        data = data.replace(metadata.data.image, `https://${bucketName}.s3.amazonaws.com/${key}`);
        if (!imagesToRemove.includes(originalImage)) {
          imagesToRemove.push(originalImage);
        }
      } else {
        console.error('Error replacing feature image');
        failedImageUploads.push({
          post: post,
          image: metadata.data.image
        });
      }
    }

    // Inline markdown links of the form (../images/post/...). Any number of
    // leading '..' segments is accepted.
    const images = data.matchAll(/(\(\.\..*\/images\/post\/.*\))/g);
    let count = 0;
    for (const image of images) {
      const pieces = image[1].split('/');
      const originalFileName = pieces[pieces.length - 1];
      const extension = originalFileName.split('.').pop().replace(')', '');
      const fileName = `${prefix}_${count++}.${extension}`;
      // Drop the leading '..' segments to resolve the path relative to the
      // image directory; the trailing ')' comes from the markdown link.
      const originalImage = `./${imageDirectory}/${pieces.filter(p => !p.includes('..')).join('/')}`.replace(')', '');
      if (await uploadToS3(originalImage, fileName)) {
        data = data.replace(image[1].replace('(', '').replace(')', ''), `https://${bucketName}.s3.amazonaws.com/${fileName}`);
        if (!imagesToRemove.includes(originalImage)) {
          imagesToRemove.push(originalImage);
        }
      } else {
        console.error(`Unable to save image to S3 ${fileName}`);
        failedImageUploads.push({
          post: post,
          image: image[1]
        });
      }
    }

    fs.writeFileSync(`${blogPostDirectory}/${post}`, data);
    console.log(`${post} update complete.`);
  }

  // Delete local images only after every post has been rewritten, so a
  // mid-run failure never leaves a post referencing a removed file.
  for (const imageToRemove of imagesToRemove) {
    console.log(`removing ${imageToRemove}`);
    fs.rmSync(imageToRemove);
  }

  if (skippedPosts.length) {
    fs.writeFileSync('skipped-posts.json', JSON.stringify(skippedPosts, null, 2));
  }
  if (failedImageUploads.length) {
    fs.writeFileSync('failed-image-uploads.json', JSON.stringify(failedImageUploads, null, 2));
  }
}
run(); |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment