[S3 ES6 AWS] #AWS #S3
const AWS = require('aws-sdk')
const fs = require('fs').promises
const { createReadStream } = require('fs')
const { join } = require('path')
const { createHash } = require('crypto')
const credentials = {
  accessKeyId: process.env.AWS_ACCESS_KEY_ID || 'test',
  secretAccessKey: process.env.AWS_SECRET_KEY || 'test',
}
const useLocal = process.env.NODE_ENV !== 'production'
const bucketName = process.env.AWS_BUCKET_NAME || 'my-magic-bucket-on-s3'
const s3 = new AWS.S3({
  ...credentials,
  /**
   * Using LocalStack to test this out.
   *
   * When working locally, we'll use the LocalStack endpoints. This is the one for S3.
   * A full list of endpoints for each service can be found in the LocalStack docs.
   */
  endpoint: useLocal ? 'http://localhost:4566' : undefined,
  s3ForcePathStyle: true,
})
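/**
 * Stream a file through an MD5 hash and resolve with the hex digest.
 * Streaming keeps memory flat even for large files.
 */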
async function calculateMD5(filePath) {
  return new Promise((resolve, reject) => {
    const hash = createHash('md5')
    const stream = createReadStream(filePath)
    stream
      .on('data', (chunk) => {
        hash.update(chunk)
      })
      .on('end', () => {
        resolve(hash.digest('hex'))
      })
      .on('error', (error) => {
        reject(error)
      })
  })
}
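/**
 * Async generator that walks a directory (one level, not recursive) and
 * yields each entry's name, its fs.Stats, and its MD5 hash.
 */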
async function* readDirectory(dirPath) {
  try {
    const files = await fs.readdir(dirPath)
    for (const file of files) {
      const filePath = join(dirPath, file)
      const stats = await fs.stat(filePath)
      const md5Hash = await calculateMD5(filePath)
      yield { file, stats, md5Hash }
    }
  } catch (error) {
    throw new Error(`Failed to read directory: ${error.message}`)
  }
}
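/**
 * Thin promise wrappers over the SDK's S3 calls. Note the keys are prefixed
 * with the bucket name (`${Bucket}/${name}`), which is what the
 * `Key.split('/')` in syncObjects below relies on.
 */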
const upload = module.exports.upload = async (Bucket, name, Body) => s3.upload({
  Bucket,
  Key: `${Bucket}/${name}`,
  Body
}).promise()
const getObject = module.exports.getObject = async (Bucket, name) => s3.getObject({
  Bucket,
  Key: `${Bucket}/${name}`
}).promise()
const listObjects = module.exports.listObjects = async (Bucket, MaxKeys = 1000) => {
  const response = await s3.listObjects({
    Bucket,
    MaxKeys
  }).promise()
  if (response && response.Contents) {
    response.Contents = response.Contents.map((object) => ({
      ...object,
      ETag: object.ETag.replace(/"/g, ''), // Remove double quotes from ETag
    }))
  }
  return response
}
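/**
 * Decide what to do for a local/remote file pair by name and modify date:
 * returns 'skip', 'push', or 'pull', and throws on any other combination.
 */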
const isFileInSync = function isFileInSync(localFileName, localModifyDate, remoteFileName, remoteModifyDate) {
  if (localFileName === remoteFileName
    && localModifyDate !== null && remoteModifyDate !== null
    && localModifyDate.getTime() === remoteModifyDate.getTime()) {
    return 'skip' // Same file, same timestamp: nothing to do
  } else if (localFileName === remoteFileName
    && localModifyDate !== null && remoteModifyDate !== null
    && localModifyDate.getTime() > remoteModifyDate.getTime()) {
    return 'push' // Local file is newer, needs to be pushed to remote
  } else if (localFileName === remoteFileName
    && localModifyDate !== null && remoteModifyDate !== null
    && localModifyDate.getTime() < remoteModifyDate.getTime()) {
    return 'pull' // Remote file is newer, needs to be pulled from remote
  } else if (localFileName === null && remoteFileName !== null) {
    return 'pull' // Local file is missing, needs to be pulled from remote
  } else if (localFileName !== null && remoteFileName === null) {
    return 'push' // Remote file is missing, needs to be pushed to remote
  }
  throw new Error('Unknown state')
}
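/**
 * Build a map of the objects already in the bucket (file name -> ETag), then
 * walk the local directory to work out which files to push or pull.
 */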
const syncObjects = module.exports.syncObjects = async (Bucket, directoryPath = join(__dirname, '..', '..', 'BoomBoomDirectory'), MaxKeys = 10000) => {
  let s3KeyMd5 = {}
  const s3Files = await listObjects(Bucket, MaxKeys)
  if (s3Files && s3Files.Contents) {
    console.log('calculating the md5 on the s3 files')
    let count = 0
    s3KeyMd5 = s3Files.Contents.reduce((acc, { Key, LastModified, ETag }) => {
      count++
      const [, fileName] = Key.split('/')
      // acc[fileName] = new Date(LastModified)
      acc[fileName] = ETag
      return acc
    }, {})
    console.log(`${count} files found in s3`)
  } else {
    console.error('no content from s3')
  }
  const listOfFilesToPush = new Set()
  const listOfFilesToPull = new Set()
  for await (const { file: localFileName, stats, md5Hash } of readDirectory(directoryPath)) {
    if (!localFileName || localFileName.startsWith('.')) {
      continue
    }
    const localModifyDate = new Date(stats.mtime)
    // stopped here
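    // --- A possible continuation (a sketch, not part of the original gist) ---
    // Compare the local MD5 against the ETag captured in s3KeyMd5. This
    // assumes single-part uploads, where the S3 ETag is the MD5 of the body;
    // multipart ETags are NOT plain MD5 hashes. A timestamp-based alternative
    // would feed localModifyDate and LastModified into isFileInSync() above.
    const remoteETag = s3KeyMd5[localFileName]
    if (remoteETag === undefined || remoteETag !== md5Hash) {
      listOfFilesToPush.add(localFileName) // missing or different on S3: push (assumes local wins on conflict)
    }
    // (The pull side would mirror this: any key in s3KeyMd5 with no local
    // counterpart belongs in listOfFilesToPull.)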
  }
}
const run = async () => {
  // const encoding = 'utf8' // this is default for nodejs, just put it here for clarity
  // const localBoomBoomFile = (await fs.readFile(join(__dirname, '..', '..', 'BoomBoomDirectory'))).toString(encoding)
  // await upload(bucketName, 'BoomBoomFile', localBoomBoomFile )
  // const dir = await listObjects(bucketName)
  // console.log(JSON.stringify(dir, null, 4));
  // const dwnLd = await getObject(bucketName, 'BoomBoomFile')
  // console.log('dwnLd', dwnLd);
  // console.log('dwnLd.Body', dwnLd.Body.toString(encoding));
  await syncObjects(bucketName)
}
run().then(console.log).catch(console.error).then(() => process.exit(0))
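// To try this out locally (assuming a stock LocalStack setup; bucket and file
// names are just this gist's examples):
//   docker run --rm -p 4566:4566 localstack/localstack
//   awslocal s3 mb s3://my-magic-bucket-on-s3
//   node s3-es6-aws.js   # whatever name you saved this gist under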