docker log limit
method 1 -> docker run example
docker run -d \
--log-driver json-file \
--log-opt max-size=5k \
--log-opt max-file=10 \
chentex/random-logger:latest
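method 2 -> daemon.json (daemon-wide default; a sketch assuming the standard /etc/docker/daemon.json path — log-opt values must be strings, and the new defaults apply only to containers created after the restart)
sudo tee /etc/docker/daemon.json <<'EOF'
{
  "log-driver": "json-file",
  "log-opts": {
    "max-size": "5k",
    "max-file": "10"
  }
}
EOF
sudo systemctl restart docker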
s3 bucket policy -> public read for specific objects
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": "*",
      "Action": "s3:GetObject",
      "Resource": [
        "arn:aws:s3:::<bucket-name>/file1.txt",
        "arn:aws:s3:::<bucket-name>/file2.txt"
      ]
    }
  ]
}
lambda backup config (node.js) -> read instance name and retention from event or env
const AWS = require('aws-sdk');

exports.handler = async (event, context) => {
    console.log(JSON.stringify(event));

    // ================================
    // Define your backups
    // ================================
    const instanceName = event.instanceName || process.env['instanceName'];
    const backupDaysMax = event.days || process.env['days']; // keep at least 7 daily backups

    // ... snapshot/backup logic goes here ...
};
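Since the handler falls back to environment variables, one way to set them is via the AWS CLI (a sketch; <function-name> and <instance> are placeholders):
aws lambda update-function-configuration \
  --function-name <function-name> \
  --environment 'Variables={instanceName=<instance>,days=7}'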
Below worked for me on AWS CentOS / Amazon Linux 2 AMIs (Karoo)
Step 1 : Resize the EBS volume from the AWS console for the attached EC2 instance
Step 2 : Log in (SSH) to the EC2 instance to which the volume is attached
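Step 3 : Grow the partition and then the filesystem. A sketch assuming the root device is /dev/xvda, partition 1, and the Amazon Linux 2 default XFS root (use resize2fs for ext4):
lsblk                          # confirm the new volume size and partition layout
sudo growpart /dev/xvda 1      # extend partition 1 to fill the enlarged volume
sudo xfs_growfs -d /           # grow an XFS root filesystem to the max size
# for ext4 instead: sudo resize2fs /dev/xvda1
df -hT                         # verify the new filesystem size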
decrypt DBeaver saved credentials (macOS workspace path)
openssl aes-128-cbc -d -K babb4a9f774ab853c96c2d653dfe544a -iv 00000000000000000000000000000000 -in "${HOME}/Library/DBeaverData/workspace6/General/.dbeaver/credentials-config.json" | dd bs=1 skip=16 2>/dev/null
git basics
git init
git status
git add <filename>
git commit -m "commit message"
git log
git remote add origin <remote-url>
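After adding the remote, push and set the upstream (assuming your default branch is named main and <remote-url> points at your repository):
git push -u origin main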
lambda s3 event handler (python) -> print bucket and key from the triggering event
import json
import boto3

def lambda_handler(event, context):
    # iterate over the S3 records in the triggering event
    for e in event['Records']:
        bucket_name = e['s3']['bucket']['name']
        object_key = e['s3']['object']['key']
        print(bucket_name, object_key)
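To smoke-test the handler, invoke it with a minimal S3-style event (a sketch; <function-name> and <bucket-name> are placeholders, and --cli-binary-format is needed on AWS CLI v2 to pass raw JSON payloads):
aws lambda invoke \
  --function-name <function-name> \
  --cli-binary-format raw-in-base64-out \
  --payload '{"Records":[{"s3":{"bucket":{"name":"<bucket-name>"},"object":{"key":"file1.txt"}}}]}' \
  response.json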