Bash script to export selected tables from a PostgreSQL database to S3 as CSV files.
#!/bin/bash

# AWS credentials used by the aws CLI.
export AWS_ACCESS_KEY_ID=...
export AWS_SECRET_ACCESS_KEY=...
export AWS_DEFAULT_REGION=...

# PostgreSQL connection settings.
PG_HOST=...
PG_USER=...
PG_PASS='...'
PG_DB=...

# Target S3 bucket for the exported CSV files.
S3_BUCKET=...

# Exports a table (or a SELECT wrapped in parentheses) as CSV and uploads it to S3.
#   $1 - name used for the local CSV file and the S3 key prefix
#   $2 - table name or parenthesized SELECT passed to COPY
function export_and_copy_to_s3() {
  NAME=$1
  TABLE_OR_SELECT=$2
  LOCAL_FILE="/tmp/${NAME}.csv"

  # Dump the data as fully quoted CSV to a local temporary file.
  PGPASSWORD=$PG_PASS psql -h "$PG_HOST" -U "$PG_USER" -d "$PG_DB" \
    -c "COPY $TABLE_OR_SELECT TO STDOUT CSV DELIMITER ',' QUOTE '\"' FORCE QUOTE *" > "$LOCAL_FILE"

  # Group each day's exports under a date-stamped prefix, e.g. 20230710/example_table_1/.
  SNAPSHOT_ID="$(date +%Y%m%d)"
  aws s3 cp "$LOCAL_FILE" "s3://${S3_BUCKET}/${SNAPSHOT_ID}/${NAME}/"

  rm "$LOCAL_FILE"
}

# Export a whole table and a projection of selected columns.
export_and_copy_to_s3 "example_table_1" "public.example_table_1"
export_and_copy_to_s3 "example_table_2" "(select column_name_x, column_name_y from public.example_table_2)"